Update deps
This commit is contained in:
parent
a6c01b9a50
commit
0e09735df7
|
@ -7,14 +7,14 @@
|
|||
".",
|
||||
"cmd/misspell"
|
||||
]
|
||||
revision = "59894abde931a32630d4e884a09c682ed20c5c7c"
|
||||
version = "v0.3.0"
|
||||
revision = "b90dc15cfd220ecf8bbc9043ecb928cef381f011"
|
||||
version = "v0.3.4"
|
||||
|
||||
[[projects]]
|
||||
branch = "v2"
|
||||
name = "gopkg.in/yaml.v2"
|
||||
packages = ["."]
|
||||
revision = "eb3733d160e74a9c7e442f435eb3bea458e1d19f"
|
||||
revision = "5420a8b6744d3b0345ab293f6fcba19c978f1183"
|
||||
version = "v2.2.1"
|
||||
|
||||
[solve-meta]
|
||||
analyzer-name = "dep"
|
||||
|
|
|
@ -1,33 +0,0 @@
|
|||
dist/
|
||||
bin/
|
||||
|
||||
# editor turds
|
||||
*~
|
||||
*.gz
|
||||
*.bz2
|
||||
*.csv
|
||||
|
||||
# Compiled Object files, Static and Dynamic libs (Shared Objects)
|
||||
*.o
|
||||
*.a
|
||||
*.so
|
||||
|
||||
# Folders
|
||||
_obj
|
||||
_test
|
||||
|
||||
# Architecture specific extensions/prefixes
|
||||
*.[568vq]
|
||||
[568vq].out
|
||||
|
||||
*.cgo1.go
|
||||
*.cgo2.c
|
||||
_cgo_defun.c
|
||||
_cgo_gotypes.go
|
||||
_cgo_export.*
|
||||
|
||||
_testmain.go
|
||||
|
||||
*.exe
|
||||
*.test
|
||||
*.prof
|
|
@ -1,11 +0,0 @@
|
|||
sudo: required
|
||||
dist: trusty
|
||||
language: go
|
||||
go:
|
||||
- 1.8.3
|
||||
git:
|
||||
depth: 1
|
||||
script:
|
||||
- make -e ci
|
||||
after_success:
|
||||
- test -n "$TRAVIS_TAG" && ./scripts/goreleaser.sh
|
|
@ -1,37 +0,0 @@
|
|||
FROM golang:1.8.1-alpine
|
||||
MAINTAINER https://github.com/client9/misspell
|
||||
|
||||
# cache buster
|
||||
RUN echo 3
|
||||
|
||||
# git is needed for "go get" below
|
||||
RUN apk add --no-cache git make
|
||||
|
||||
# these are my standard testing / linting tools
|
||||
RUN /bin/true \
|
||||
&& go get -u github.com/alecthomas/gometalinter \
|
||||
&& gometalinter --install \
|
||||
&& rm -rf /go/src /go/pkg
|
||||
#
|
||||
# * SCOWL word list
|
||||
#
|
||||
# Downloads
|
||||
# http://wordlist.aspell.net/dicts/
|
||||
# --> http://app.aspell.net/create
|
||||
#
|
||||
|
||||
# use en_US large size
|
||||
# use regular size for others
|
||||
ENV SOURCE_US_BIG http://app.aspell.net/create?max_size=70&spelling=US&max_variant=2&diacritic=both&special=hacker&special=roman-numerals&download=wordlist&encoding=utf-8&format=inline
|
||||
|
||||
# should be able tell difference between English variations using this
|
||||
ENV SOURCE_US http://app.aspell.net/create?max_size=60&spelling=US&max_variant=1&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_GB_ISE http://app.aspell.net/create?max_size=60&spelling=GBs&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_GB_IZE http://app.aspell.net/create?max_size=60&spelling=GBz&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
ENV SOURCE_CA http://app.aspell.net/create?max_size=60&spelling=CA&max_variant=2&diacritic=both&download=wordlist&encoding=utf-8&format=inline
|
||||
|
||||
RUN /bin/true \
|
||||
&& mkdir /scowl-wl \
|
||||
&& wget -O /scowl-wl/words-US-60.txt ${SOURCE_US} \
|
||||
&& wget -O /scowl-wl/words-GB-ise-60.txt ${SOURCE_GB_ISE}
|
||||
|
|
@ -1,84 +0,0 @@
|
|||
CONTAINER=nickg/misspell
|
||||
|
||||
install: ## install misspell into GOPATH/bin
|
||||
go install ./cmd/misspell
|
||||
|
||||
build: hooks ## build and lint misspell
|
||||
go install ./cmd/misspell
|
||||
gometalinter \
|
||||
--vendor \
|
||||
--deadline=60s \
|
||||
--disable-all \
|
||||
--enable=vet \
|
||||
--enable=golint \
|
||||
--enable=gofmt \
|
||||
--enable=goimports \
|
||||
--enable=gosimple \
|
||||
--enable=staticcheck \
|
||||
--enable=ineffassign \
|
||||
--exclude=/usr/local/go/src/net/lookup_unix.go \
|
||||
./...
|
||||
go test .
|
||||
|
||||
test: ## run all tests
|
||||
go test .
|
||||
|
||||
# the grep in line 2 is to remove misspellings in the spelling dictionary
|
||||
# that trigger false positives!!
|
||||
falsepositives: /scowl-wl
|
||||
cat /scowl-wl/words-US-60.txt | \
|
||||
grep -i -v -E "payed|Tyre|Euclidian|nonoccurence|dependancy|reenforced|accidently|surprize|dependance|idealogy|binominal|causalities|conquerer|withing|casette|analyse|analogue|dialogue|paralyse|catalogue|archaeolog|clarinettist|catalyses|cancell|chisell|ageing|cataloguing" | \
|
||||
misspell -debug -error
|
||||
cat /scowl-wl/words-GB-ise-60.txt | \
|
||||
grep -v -E "payed|nonoccurence|withing" | \
|
||||
misspell -locale=UK -debug -error
|
||||
# cat /scowl-wl/words-GB-ize-60.txt | \
|
||||
# grep -v -E "withing" | \
|
||||
# misspell -debug -error
|
||||
# cat /scowl-wl/words-CA-60.txt | \
|
||||
# grep -v -E "withing" | \
|
||||
# misspell -debug -error
|
||||
|
||||
bench: ## run benchmarks
|
||||
go test -bench '.*'
|
||||
|
||||
clean: ## clean up time
|
||||
rm -rf dist/ bin/
|
||||
go clean ./...
|
||||
git gc --aggressive
|
||||
|
||||
ci: ## run test like travis-ci does, requires docker
|
||||
docker run --rm \
|
||||
-v $(PWD):/go/src/github.com/client9/misspell \
|
||||
-w /go/src/github.com/client9/misspell \
|
||||
${CONTAINER} \
|
||||
make build falsepositives
|
||||
|
||||
docker-build: ## build a docker test image
|
||||
docker build -t ${CONTAINER} .
|
||||
|
||||
docker-pull: ## pull latest test image
|
||||
docker pull ${CONTAINER}
|
||||
|
||||
docker-console: ## log into the test image
|
||||
docker run --rm -it \
|
||||
-v $(PWD):/go/src/github.com/client9/misspell \
|
||||
-w /go/src/github.com/client9/misspell \
|
||||
${CONTAINER} sh
|
||||
|
||||
.git/hooks/pre-commit: scripts/pre-commit.sh
|
||||
cp -f scripts/pre-commit.sh .git/hooks/pre-commit
|
||||
.git/hooks/commit-msg: scripts/commit-msg.sh
|
||||
cp -f scripts/commit-msg.sh .git/hooks/commit-msg
|
||||
hooks: .git/hooks/pre-commit .git/hooks/commit-msg ## install git precommit hooks
|
||||
|
||||
.PHONY: help ci console docker-build bench
|
||||
|
||||
# https://www.client9.com/self-documenting-makefiles/
|
||||
help:
|
||||
@awk -F ':|##' '/^[^\t].+?:.*?##/ {\
|
||||
printf "\033[36m%-30s\033[0m %s\n", $$1, $$NF \
|
||||
}' $(MAKEFILE_LIST)
|
||||
.DEFAULT_GOAL=help
|
||||
.PHONY=help
|
||||
|
|
@ -1,416 +0,0 @@
|
|||
[](https://travis-ci.org/client9/misspell) [](https://goreportcard.com/report/github.com/client9/misspell) [](https://godoc.org/github.com/client9/misspell) [](http://gocover.io/github.com/client9/misspell) [](https://raw.githubusercontent.com/client9/misspell/master/LICENSE)
|
||||
|
||||
Correct commonly misspelled English words... quickly.
|
||||
|
||||
### Install
|
||||
|
||||
|
||||
If you just want a binary and to start using `misspell`:
|
||||
|
||||
```
|
||||
curl -L -o ./install-misspell.sh https://git.io/misspell
|
||||
sh ./install-misspell.sh
|
||||
```
|
||||
|
||||
will install as `./bin/misspell`. You can adjust the download location using the `-b` flag. File a ticket if you want another platform supported.
|
||||
|
||||
|
||||
If you use [Go](https://golang.org/), the best way to run `misspell` is by using [gometalinter](#gometalinter). Otherwise, install `misspell` the old-fashioned way:
|
||||
|
||||
```
|
||||
go get -u github.com/client9/misspell/cmd/misspell
|
||||
```
|
||||
|
||||
and misspell will be in your `GOPATH`
|
||||
|
||||
### Usage
|
||||
|
||||
|
||||
```bash
|
||||
$ misspell all.html your.txt important.md files.go
|
||||
your.txt:42:10 found "langauge" a misspelling of "language"
|
||||
|
||||
# ^ file, line, column
|
||||
```
|
||||
|
||||
```
|
||||
$ misspell -help
|
||||
Usage of misspell:
|
||||
-debug
|
||||
Debug matching, very slow
|
||||
-error
|
||||
Exit with 2 if misspelling found
|
||||
-f string
|
||||
'csv', 'sqlite3' or custom Golang template for output
|
||||
-i string
|
||||
ignore the following corrections, comma separated
|
||||
-j int
|
||||
Number of workers, 0 = number of CPUs
|
||||
-legal
|
||||
Show legal information and exit
|
||||
-locale string
|
||||
Correct spellings using locale perferances for US or UK. Default is to use a neutral variety of English. Setting locale to US will correct the British spelling of 'colour' to 'color'
|
||||
-o string
|
||||
output file or [stderr|stdout|] (default "stdout")
|
||||
-q Do not emit misspelling output
|
||||
-source string
|
||||
Source mode: auto=guess, go=golang source, text=plain or markdown-like text (default "auto")
|
||||
-w Overwrite file with corrections (default is just to display)
|
||||
```
|
||||
|
||||
## FAQ
|
||||
|
||||
* [Automatic Corrections](#correct)
|
||||
* [Converting UK spellings to US](#locale)
|
||||
* [Using pipes and stdin](#stdin)
|
||||
* [Golang special support](#golang)
|
||||
* [gometalinter support](#gometalinter)
|
||||
* [CSV Output](#csv)
|
||||
* [Using SQLite3](#sqlite)
|
||||
* [Changing output format](#output)
|
||||
* [Checking a folder recursively](#recursive)
|
||||
* [Performance](#performance)
|
||||
* [Known Issues](#issues)
|
||||
* [Debugging](#debug)
|
||||
* [False Negatives and missing words](#missing)
|
||||
* [Origin of Word Lists](#words)
|
||||
* [Software License](#license)
|
||||
* [Problem statement](#problem)
|
||||
* [Other spelling correctors](#others)
|
||||
* [Other ideas](#otherideas)
|
||||
|
||||
<a name="correct"></a>
|
||||
### How can I make the corrections automatically?
|
||||
|
||||
Just add the `-w` flag!
|
||||
|
||||
```
|
||||
$ misspell -w all.html your.txt important.md files.go
|
||||
your.txt:9:21:corrected "langauge" to "language"
|
||||
|
||||
# ^booyah
|
||||
```
|
||||
|
||||
<a name="locale"></a>
|
||||
### How do I convert British spellings to American (or vice-versa)?
|
||||
|
||||
Add the `-locale US` flag!
|
||||
|
||||
```bash
|
||||
$ misspell -locale US important.txt
|
||||
important.txt:10:20 found "colour" a misspelling of "color"
|
||||
```
|
||||
|
||||
Add the `-locale UK` flag!
|
||||
|
||||
```bash
|
||||
$ echo "My favorite color is blue" | misspell -locale UK
|
||||
stdin:1:3:found "favorite color" a misspelling of "favourite colour"
|
||||
```
|
||||
|
||||
Help is appreciated as I'm neither British nor an
|
||||
expert in the English language.
|
||||
|
||||
<a name="recursive"></a>
|
||||
### How do you check an entire folder recursively?
|
||||
|
||||
Just list a directory you'd like to check
|
||||
|
||||
```bash
|
||||
misspell .
|
||||
misspell aDirectory anotherDirectory aFile
|
||||
```
|
||||
|
||||
You can also run misspell recursively using the following shell tricks:
|
||||
|
||||
```bash
|
||||
misspell directory/**/*
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```bash
|
||||
find . -type f | xargs misspell
|
||||
```
|
||||
|
||||
You can select a type of file as well. The following examples selects all `.txt` files that are *not* in the `vendor` directory:
|
||||
|
||||
```bash
|
||||
find . -type f -name '*.txt' | grep -v vendor/ | xargs misspell -error
|
||||
```
|
||||
|
||||
<a name="stdin"></a>
|
||||
### Can I use pipes or `stdin` for input?
|
||||
|
||||
Yes!
|
||||
|
||||
Print messages to `stderr` only:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell
|
||||
stdin:1:0:found "zeebra" a misspelling of "zebra"
|
||||
```
|
||||
|
||||
Print messages to `stderr`, and corrected text to `stdout`:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell -w
|
||||
stdin:1:0:corrected "zeebra" to "zebra"
|
||||
zebra
|
||||
```
|
||||
|
||||
Only print the corrected text to `stdout`:
|
||||
|
||||
```bash
|
||||
$ echo "zeebra" | misspell -w -q
|
||||
zebra
|
||||
```
|
||||
|
||||
<a name="golang"></a>
|
||||
### Are there special rules for golang source files?
|
||||
|
||||
Yes! If the file ends in `.go`, then misspell will only check spelling in
|
||||
comments.
|
||||
|
||||
If you want to force a file to be checked as a golang source, use `-source=go`
|
||||
on the command line. Conversely, you can check a golang source as if it were
|
||||
pure text by using `-source=text`. You might want to do this since many
|
||||
variable names have misspellings in them!
|
||||
|
||||
### Can I check only-comments in other other programming languages?
|
||||
|
||||
I'm told the using `-source=go` works well for ruby, javascript, java, c and
|
||||
c++.
|
||||
|
||||
It doesn't work well for python and bash.
|
||||
|
||||
<a name="gometalinter"></a>
|
||||
### Does this work with gometalinter?
|
||||
|
||||
[gometalinter](https://github.com/alecthomas/gometalinter) runs
|
||||
multiple golang linters. Starting on [2016-06-12](https://github.com/alecthomas/gometalinter/pull/134)
|
||||
gometalinter supports `misspell` natively but it is disabled by default.
|
||||
|
||||
```bash
|
||||
# update your copy of gometalinter
|
||||
go get -u github.com/alecthomas/gometalinter
|
||||
|
||||
# install updates and misspell
|
||||
gometalinter --install --update
|
||||
```
|
||||
|
||||
To use, just enable `misspell`
|
||||
|
||||
```
|
||||
gometalinter --enable misspell ./...
|
||||
```
|
||||
|
||||
Note that gometalinter only checks golang files, and uses the default options
|
||||
of `misspell`
|
||||
|
||||
You may wish to run this on your plaintext (.txt) and/or markdown files too.
|
||||
|
||||
|
||||
<a name="csv"></a>
|
||||
### How Can I Get CSV Output?
|
||||
|
||||
Using `-f csv`, the output is standard comma-seprated values with headers in the first row.
|
||||
|
||||
```
|
||||
misspell -f csv *
|
||||
file,line,column,typo,corrected
|
||||
"README.md",9,22,langauge,language
|
||||
"README.md",47,25,langauge,language
|
||||
```
|
||||
|
||||
<a name="sqlite"></a>
|
||||
### How can I export to SQLite3?
|
||||
|
||||
Using `-f sqlite`, the output is a [sqlite3](https://www.sqlite.org/index.html) dump-file.
|
||||
|
||||
```bash
|
||||
$ misspell -f sqlite * > /tmp/misspell.sql
|
||||
$ cat /tmp/misspell.sql
|
||||
|
||||
PRAGMA foreign_keys=OFF;
|
||||
BEGIN TRANSACTION;
|
||||
CREATE TABLE misspell(
|
||||
"file" TEXT,
|
||||
"line" INTEGER,i
|
||||
"column" INTEGER,i
|
||||
"typo" TEXT,
|
||||
"corrected" TEXT
|
||||
);
|
||||
INSERT INTO misspell VALUES("install.txt",202,31,"immediatly","immediately");
|
||||
# etc...
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
```bash
|
||||
$ sqlite3 -init /tmp/misspell.sql :memory: 'select count(*) from misspell'
|
||||
1
|
||||
```
|
||||
|
||||
With some tricks you can directly pipe output to sqlite3 by using `-init /dev/stdin`:
|
||||
|
||||
```
|
||||
misspell -f sqlite * | sqlite3 -init /dev/stdin -column -cmd '.width 60 15' ':memory' \
|
||||
'select substr(file,35),typo,count(*) as count from misspell group by file, typo order by count desc;'
|
||||
```
|
||||
|
||||
<a name="ignore"></a>
|
||||
### How can I ignore rules?
|
||||
|
||||
Using the `-i "comma,separated,rules"` flag you can specify corrections to ignore.
|
||||
|
||||
For example, if you were to run `misspell -w -error -source=text` against document that contains the string `Guy Finkelshteyn Braswell`, misspell would change the text to `Guy Finkelstheyn Bras well`. You can then
|
||||
determine the rules to ignore by reverting the change and running the with the `-debug` flag. You can then see
|
||||
that the corrections were `htey -> they` and `aswell -> as well`. To ignore these two rules, you add `-i "htey,aswell"` to
|
||||
your command. With debug mode on, you can see it print the corrections, but it will no longer make them.
|
||||
|
||||
<a name="output"></a>
|
||||
### How can I change the output format?
|
||||
|
||||
Using the `-f template` flag you can pass in a
|
||||
[golang text template](https://golang.org/pkg/text/template/) to format the output.
|
||||
|
||||
One can use `printf "%q" VALUE` to safely quote a value.
|
||||
|
||||
The default template is compatible with [gometalinter](https://github.com/alecthomas/gometalinter)
|
||||
```
|
||||
{{ .Filename }}:{{ .Line }}:{{ .Column }}:corrected {{ printf "%q" .Original }} to "{{ printf "%q" .Corrected }}"
|
||||
```
|
||||
|
||||
To just print probable misspellings:
|
||||
|
||||
```
|
||||
-f '{{ .Original }}'
|
||||
```
|
||||
|
||||
<a name="problem"></a>
|
||||
### What problem does this solve?
|
||||
|
||||
This corrects commonly misspelled English words in computer source
|
||||
code, and other text-based formats (`.txt`, `.md`, etc).
|
||||
|
||||
It is designed to run quickly so it can be
|
||||
used as a [pre-commit hook](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks)
|
||||
with minimal burden on the developer.
|
||||
|
||||
It does not work with binary formats (e.g. Word, etc).
|
||||
|
||||
It is not a complete spell-checking program nor a grammar checker.
|
||||
|
||||
<a name="others"></a>
|
||||
### What are other misspelling correctors and what's wrong with them?
|
||||
|
||||
Some other misspelling correctors:
|
||||
|
||||
* https://github.com/vlajos/misspell_fixer
|
||||
* https://github.com/lyda/misspell-check
|
||||
* https://github.com/lucasdemarchi/codespell
|
||||
|
||||
They all work but had problems that prevented me from using them at scale:
|
||||
|
||||
* slow, all of the above check one misspelling at a time (i.e. linear) using regexps
|
||||
* not MIT/Apache2 licensed (or equivalent)
|
||||
* have dependencies that don't work for me (python3, bash, linux sed, etc)
|
||||
* don't understand American vs. British English and sometimes makes unwelcome "corrections"
|
||||
|
||||
That said, they might be perfect for you and many have more features
|
||||
than this project!
|
||||
|
||||
<a name="performance"></a>
|
||||
### How fast is it?
|
||||
|
||||
Misspell is easily 100x to 1000x faster than other spelling correctors. You
|
||||
should be able to check and correct 1000 files in under 250ms.
|
||||
|
||||
This uses the mighty power of golang's
|
||||
[strings.Replacer](https://golang.org/pkg/strings/#Replacer) which is
|
||||
a implementation or variation of the
|
||||
[Aho–Corasick algorithm](https://en.wikipedia.org/wiki/Aho–Corasick_algorithm).
|
||||
This makes multiple substring matches *simultaneously*.
|
||||
|
||||
In addition this uses multiple CPU cores to work on multiple files.
|
||||
|
||||
<a name="issues"></a>
|
||||
### What problems does it have?
|
||||
|
||||
Unlike the other projects, this doesn't know what a "word" is. There may be
|
||||
more false positives and false negatives due to this. On the other hand, it
|
||||
sometimes catches things others don't.
|
||||
|
||||
Either way, please file bugs and we'll fix them!
|
||||
|
||||
Since it operates in parallel to make corrections, it can be non-obvious to
|
||||
determine exactly what word was corrected.
|
||||
|
||||
<a name="debug"></a>
|
||||
### It's making mistakes. How can I debug?
|
||||
|
||||
Run using `-debug` flag on the file you want. It should then print what word
|
||||
it is trying to correct. Then [file a
|
||||
bug](https://github.com/client9/misspell/issues) describing the problem.
|
||||
Thanks!
|
||||
|
||||
<a name="missing"></a>
|
||||
### Why is it making mistakes or missing items in golang files?
|
||||
|
||||
The matching function is *case-sensitive*, so variable names that are multiple
|
||||
worlds either in all-upper or all-lower case sometimes can cause false
|
||||
positives. For instance a variable named `bodyreader` could trigger a false
|
||||
positive since `yrea` is in the middle that could be corrected to `year`.
|
||||
Other problems happen if the variable name uses a English contraction that
|
||||
should use an apostrophe. The best way of fixing this is to use the
|
||||
[Effective Go naming
|
||||
conventions](https://golang.org/doc/effective_go.html#mixed-caps) and use
|
||||
[camelCase](https://en.wikipedia.org/wiki/CamelCase) for variable names. You
|
||||
can check your code using [golint](https://github.com/golang/lint)
|
||||
|
||||
<a name="license"></a>
|
||||
### What license is this?
|
||||
|
||||
The main code is [MIT](https://github.com/client9/misspell/blob/master/LICENSE).
|
||||
|
||||
Misspell also makes uses of the Golang standard library and contains a modified version of Golang's [strings.Replacer](https://golang.org/pkg/strings/#Replacer)
|
||||
which are covered under a [BSD License](https://github.com/golang/go/blob/master/LICENSE). Type `misspell -legal` for more details or see [legal.go](https://github.com/client9/misspell/blob/master/legal.go)
|
||||
|
||||
<a name="words"></a>
|
||||
### Where do the word lists come from?
|
||||
|
||||
It started with a word list from
|
||||
[Wikipedia](https://en.wikipedia.org/wiki/Wikipedia:Lists_of_common_misspellings/For_machines).
|
||||
Unfortunately, this list had to be highly edited as many of the words are
|
||||
obsolete or based from mistakes on mechanical typewriters (I'm guessing).
|
||||
|
||||
Additional words were added based on actually mistakes seen in
|
||||
the wild (meaning self-generated).
|
||||
|
||||
Variations of UK and US spellings are based on many sources including:
|
||||
|
||||
* http://www.tysto.com/uk-us-spelling-list.html (with heavy editing, many are incorrect)
|
||||
* http://www.oxforddictionaries.com/us/words/american-and-british-spelling-american (excellent site but incomplete)
|
||||
* Diffing US and UK [scowl dictionaries](http://wordlist.aspell.net)
|
||||
|
||||
American English is more accepting of spelling variations than is British
|
||||
English, so "what is American or not" is subject to opinion. Corrections and help welcome.
|
||||
|
||||
<a name="otherideas"></a>
|
||||
### What are some other enhancements that could be done?
|
||||
|
||||
Here's some ideas for enhancements:
|
||||
|
||||
*Capitalization of proper nouns* could be done (e.g. weekday and month names, country names, language names)
|
||||
|
||||
*Opinionated US spellings* US English has a number of words with alternate
|
||||
spellings. Think [adviser vs.
|
||||
advisor](http://grammarist.com/spelling/adviser-advisor/). While "advisor" is not wrong, the opinionated US
|
||||
locale would correct "advisor" to "adviser".
|
||||
|
||||
*Versioning* Some type of versioning is needed so reporting mistakes and errors is easier.
|
||||
|
||||
*Feedback* Mistakes would be sent to some server for agregation and feedback review.
|
||||
|
||||
*Contractions and Apostrophes* This would optionally correct "isnt" to
|
||||
"isn't", etc.
|
|
@ -1,105 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var (
|
||||
sampleClean string
|
||||
sampleDirty string
|
||||
tmpCount int
|
||||
tmp string
|
||||
rep *Replacer
|
||||
)
|
||||
|
||||
func init() {
|
||||
|
||||
buf := bytes.Buffer{}
|
||||
for i := 0; i < len(DictMain); i += 2 {
|
||||
buf.WriteString(DictMain[i+1] + " ")
|
||||
if i%5 == 0 {
|
||||
buf.WriteString("\n")
|
||||
}
|
||||
}
|
||||
sampleClean = buf.String()
|
||||
sampleDirty = sampleClean + DictMain[0] + "\n"
|
||||
rep = New()
|
||||
}
|
||||
|
||||
// BenchmarkCleanString takes a clean string (one with no errors)
|
||||
func BenchmarkCleanString(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
var updated string
|
||||
var diffs []Diff
|
||||
var count int
|
||||
for n := 0; n < b.N; n++ {
|
||||
updated, diffs = rep.Replace(sampleClean)
|
||||
count += len(diffs)
|
||||
}
|
||||
|
||||
// prevent compilier optimizations
|
||||
tmpCount = count
|
||||
tmp = updated
|
||||
}
|
||||
|
||||
func discardDiff(_ Diff) {
|
||||
tmpCount++
|
||||
}
|
||||
|
||||
// BenchmarkCleanStream takes a clean reader (no misspells) and outputs to a buffer
|
||||
func BenchmarkCleanStream(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
tmpCount = 0
|
||||
buf := bytes.NewBufferString(sampleClean)
|
||||
out := bytes.NewBuffer(make([]byte, 0, len(sampleClean)+100))
|
||||
for n := 0; n < b.N; n++ {
|
||||
buf.Reset()
|
||||
buf.WriteString(sampleClean)
|
||||
out.Reset()
|
||||
rep.ReplaceReader(buf, out, discardDiff)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkCleanStreamDiscard takes a clean reader and discards output
|
||||
func BenchmarkCleanStreamDiscard(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
|
||||
buf := bytes.NewBufferString(sampleClean)
|
||||
tmpCount = 0
|
||||
for n := 0; n < b.N; n++ {
|
||||
buf.Reset()
|
||||
buf.WriteString(sampleClean)
|
||||
rep.ReplaceReader(buf, ioutil.Discard, discardDiff)
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkCleanString takes a clean string (one with no errors)
|
||||
func BenchmarkDirtyString(b *testing.B) {
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
var updated string
|
||||
var diffs []Diff
|
||||
var count int
|
||||
for n := 0; n < b.N; n++ {
|
||||
updated, diffs = rep.Replace(sampleDirty)
|
||||
count += len(diffs)
|
||||
}
|
||||
|
||||
// prevent compilier optimizations
|
||||
tmpCount = count
|
||||
tmp = updated
|
||||
}
|
||||
|
||||
func BenchmarkCompile(b *testing.B) {
|
||||
r := New()
|
||||
b.ReportAllocs()
|
||||
b.ResetTimer()
|
||||
for n := 0; n < b.N; n++ {
|
||||
r.Compile()
|
||||
}
|
||||
}
|
|
@ -1,42 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestCaseStyle(t *testing.T) {
|
||||
cases := []struct {
|
||||
word string
|
||||
want WordCase
|
||||
}{
|
||||
{"lower", CaseLower},
|
||||
{"what's", CaseLower},
|
||||
{"UPPER", CaseUpper},
|
||||
{"Title", CaseTitle},
|
||||
{"CamelCase", CaseUnknown},
|
||||
{"camelCase", CaseUnknown},
|
||||
}
|
||||
|
||||
for pos, tt := range cases {
|
||||
got := CaseStyle(tt.word)
|
||||
if tt.want != got {
|
||||
t.Errorf("Case %d %q: want %v got %v", pos, tt.word, tt.want, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestCaseVariations(t *testing.T) {
|
||||
cases := []struct {
|
||||
word string
|
||||
want []string
|
||||
}{
|
||||
{"that's", []string{"that's", "That's", "THAT'S"}},
|
||||
}
|
||||
for pos, tt := range cases {
|
||||
got := CaseVariations(tt.word, CaseStyle(tt.word))
|
||||
if !reflect.DeepEqual(tt.want, got) {
|
||||
t.Errorf("Case %d %q: want %v got %v", pos, tt.word, tt.want, got)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,3 +1,4 @@
|
|||
// The misspell command corrects commonly misspelled English words in source files.
|
||||
package main
|
||||
|
||||
import (
|
||||
|
|
|
@ -1,136 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestFalsePositives(t *testing.T) {
|
||||
cases := []string{
|
||||
"importEnd",
|
||||
"drinkeries",
|
||||
"subscripting",
|
||||
"unprojected",
|
||||
"updaters",
|
||||
"templatize",
|
||||
"requesters",
|
||||
"requestors",
|
||||
"replicaset",
|
||||
"parallelise",
|
||||
"parallelize",
|
||||
"perceptron", // http://foldoc.org/perceptron
|
||||
"perceptrons", // ^^
|
||||
"convertors", // alt spelling
|
||||
"adventurers",
|
||||
" s.svc.GetObject ",
|
||||
"infinitie.net",
|
||||
"foo summaries\n",
|
||||
"thru",
|
||||
"publically",
|
||||
"6YUO5", // base64
|
||||
"cleaner", // triggered by "cleane->cleanser" and partial word FP
|
||||
" http.Redirect(w, req, req.URL.Path, http.StatusFound) ",
|
||||
"url is http://zeebra.com ",
|
||||
"path is /zeebra?zeebra=zeebra ",
|
||||
"Malcom_McLean",
|
||||
"implementor", // alt spelling, see https://github.com/client9/misspell/issues/46
|
||||
"searchtypes",
|
||||
" witness",
|
||||
"returndata",
|
||||
"UNDERSTOOD",
|
||||
"textinterface",
|
||||
" committed ",
|
||||
"committed",
|
||||
"Bengali",
|
||||
"Portuguese",
|
||||
"scientists",
|
||||
"causally",
|
||||
"embarrassing",
|
||||
"setuptools", // python package
|
||||
"committing",
|
||||
"guises",
|
||||
"disguise",
|
||||
"begging",
|
||||
"cmo",
|
||||
"cmos",
|
||||
"borked",
|
||||
"hadn't",
|
||||
"Iceweasel",
|
||||
"summarised",
|
||||
"autorenew",
|
||||
"travelling",
|
||||
"republished",
|
||||
"fallthru",
|
||||
"pruning",
|
||||
"deb.VersionDontCare",
|
||||
"authtag",
|
||||
"intrepid",
|
||||
"usefully",
|
||||
"there",
|
||||
"definite",
|
||||
"earliest",
|
||||
"Japanese",
|
||||
"international",
|
||||
"excellent",
|
||||
"gracefully",
|
||||
"carefully",
|
||||
"class",
|
||||
"include",
|
||||
"process",
|
||||
"address",
|
||||
"attempt",
|
||||
"large",
|
||||
"although",
|
||||
"specific",
|
||||
"taste",
|
||||
"against",
|
||||
"successfully",
|
||||
"unsuccessfully",
|
||||
"occurred",
|
||||
"agree",
|
||||
"controlled",
|
||||
"publisher",
|
||||
"strategy",
|
||||
"geoposition",
|
||||
"paginated",
|
||||
"happened",
|
||||
"relative",
|
||||
"computing",
|
||||
"language",
|
||||
"manual",
|
||||
"token",
|
||||
"into",
|
||||
"nothing",
|
||||
"datatool",
|
||||
"propose",
|
||||
"learnt",
|
||||
"tolerant",
|
||||
"whitehat",
|
||||
"monotonic",
|
||||
"comprised",
|
||||
"indemnity",
|
||||
"flattened",
|
||||
"interrupted",
|
||||
"inotify",
|
||||
"occasional",
|
||||
"forging",
|
||||
"ampersand",
|
||||
"decomposition",
|
||||
"commit",
|
||||
"programmer", // "grammer"
|
||||
// "requestsinserted",
|
||||
"seeked", // technical word
|
||||
"bodyreader", // variable name
|
||||
"cantPrepare", // variable name
|
||||
"dontPrepare", // variable name
|
||||
"\\nto", // https://github.com/client9/misspell/issues/93
|
||||
"4f8b42c22dd3729b519ba6f68d2da7cc5b2d606d05daed5ad5128cc03e6c6358", // https://github.com/client9/misspell/issues/97
|
||||
}
|
||||
r := New()
|
||||
r.Debug = true
|
||||
for casenum, tt := range cases {
|
||||
got, _ := r.Replace(tt)
|
||||
if got != tt {
|
||||
t.Errorf("%d: %q got converted to %q", casenum, tt, got)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,29 +0,0 @@
|
|||
# goreleaser.yml
|
||||
# https://github.com/goreleaser/goreleaser
|
||||
build:
|
||||
main: cmd/misspell/main.go
|
||||
binary: misspell
|
||||
ldflags: -s -w -X main.version={{.Version}}
|
||||
goos:
|
||||
- darwin
|
||||
- linux
|
||||
- windows
|
||||
goarch:
|
||||
- amd64
|
||||
env:
|
||||
- CGO_ENABLED=0
|
||||
ignore:
|
||||
- goos: darwin
|
||||
goarch: 386
|
||||
- goos: windows
|
||||
goarch: 386
|
||||
|
||||
archive:
|
||||
name_template: "{{ .Binary }}_{{ .Version }}_{{ .Os }}_{{ .Arch }}"
|
||||
replacements:
|
||||
amd64: 64bit
|
||||
386: 32bit
|
||||
darwin: mac
|
||||
|
||||
snapshot:
|
||||
name_template: SNAPSHOT-{{.Commit}}
|
|
@ -1,318 +0,0 @@
|
|||
#!/bin/sh
|
||||
set -e
|
||||
# Code generated by godownloader. DO NOT EDIT.
|
||||
#
|
||||
|
||||
usage() {
|
||||
this=$1
|
||||
cat <<EOF
|
||||
$this: download go binaries for client9/misspell
|
||||
|
||||
Usage: $this [-b] bindir [version]
|
||||
-b sets bindir or installation directory, default "./bin"
|
||||
[version] is a version number from
|
||||
https://github.com/client9/misspell/releases
|
||||
If version is missing, then an attempt to find the latest will be found.
|
||||
|
||||
Generated by godownloader
|
||||
https://github.com/goreleaser/godownloader
|
||||
|
||||
EOF
|
||||
exit 2
|
||||
}
|
||||
|
||||
# parse_args handles command-line flags:
#   -b DIR   set the install directory (default ./bin, or $BINDIR from env)
#   -h, -?   print usage and exit
# The first remaining positional argument, if any, becomes VERSION.
parse_args() {
  # BINDIR is ./bin unless set by ENV,
  # over-ridden by flag below
  BINDIR=${BINDIR:-./bin}
  while getopts "b:h?" arg; do
    case "$arg" in
      b) BINDIR="$OPTARG" ;;
      h | \?) usage "$0" ;;
    esac
  done
  # drop the parsed flags so $1 is the first positional arg
  shift $((OPTIND - 1))
  VERSION=$1
}
|
||||
# this function wraps all the destructive operations
|
||||
# if a curl|bash cuts off the end of the script due to
|
||||
# network, either nothing will happen or will syntax error
|
||||
# out preventing half-done work
|
||||
execute() {
|
||||
TMPDIR=$(mktmpdir)
|
||||
echo "$PREFIX: downloading ${TARBALL_URL}"
|
||||
http_download "${TMPDIR}/${TARBALL}" "${TARBALL_URL}"
|
||||
|
||||
echo "$PREFIX: verifying checksums"
|
||||
http_download "${TMPDIR}/${CHECKSUM}" "${CHECKSUM_URL}"
|
||||
hash_sha256_verify "${TMPDIR}/${TARBALL}" "${TMPDIR}/${CHECKSUM}"
|
||||
|
||||
(cd "${TMPDIR}" && untar "${TARBALL}")
|
||||
install -d "${BINDIR}"
|
||||
install "${TMPDIR}/${BINARY}" "${BINDIR}/"
|
||||
echo "$PREFIX: installed as ${BINDIR}/${BINARY}"
|
||||
}
|
||||
is_supported_platform() {
|
||||
platform=$1
|
||||
found=1
|
||||
case "$platform" in
|
||||
darwin/amd64) found=0 ;;
|
||||
linux/amd64) found=0 ;;
|
||||
esac
|
||||
case "$platform" in
|
||||
darwin/386) found=1 ;;
|
||||
esac
|
||||
return $found
|
||||
}
|
||||
check_platform() {
|
||||
if is_supported_platform "$PLATFORM"; then
|
||||
# optional logging goes here
|
||||
true
|
||||
else
|
||||
echo "${PREFIX}: platform $PLATFORM is not supported. Make sure this script is up-to-date and file request at https://github.com/${PREFIX}/issues/new"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
adjust_version() {
|
||||
if [ -z "${VERSION}" ]; then
|
||||
echo "$PREFIX: checking GitHub for latest version"
|
||||
VERSION=$(github_last_release "$OWNER/$REPO")
|
||||
fi
|
||||
# if version starts with 'v', remove it
|
||||
VERSION=${VERSION#v}
|
||||
}
|
||||
adjust_format() {
|
||||
# change format (tar.gz or zip) based on ARCH
|
||||
true
|
||||
}
|
||||
adjust_os() {
|
||||
# adjust archive name based on OS
|
||||
case ${OS} in
|
||||
386) OS=32bit ;;
|
||||
amd64) OS=64bit ;;
|
||||
darwin) OS=mac ;;
|
||||
esac
|
||||
true
|
||||
}
|
||||
adjust_arch() {
|
||||
# adjust archive name based on ARCH
|
||||
case ${ARCH} in
|
||||
386) ARCH=32bit ;;
|
||||
amd64) ARCH=64bit ;;
|
||||
darwin) ARCH=mac ;;
|
||||
esac
|
||||
true
|
||||
}
|
||||
|
||||
cat /dev/null <<EOF
|
||||
------------------------------------------------------------------------
|
||||
https://github.com/client9/shlib - portable posix shell functions
|
||||
Public domain - http://unlicense.org
|
||||
https://github.com/client9/shlib/blob/master/LICENSE.md
|
||||
but credit (and pull requests) appreciated.
|
||||
------------------------------------------------------------------------
|
||||
EOF
|
||||
is_command() {
|
||||
command -v "$1" >/dev/null
|
||||
}
|
||||
uname_os() {
|
||||
os=$(uname -s | tr '[:upper:]' '[:lower:]')
|
||||
echo "$os"
|
||||
}
|
||||
uname_arch() {
|
||||
arch=$(uname -m)
|
||||
case $arch in
|
||||
x86_64) arch="amd64" ;;
|
||||
x86) arch="386" ;;
|
||||
i686) arch="386" ;;
|
||||
i386) arch="386" ;;
|
||||
aarch64) arch="arm64" ;;
|
||||
armv5*) arch="arm5" ;;
|
||||
armv6*) arch="arm6" ;;
|
||||
armv7*) arch="arm7" ;;
|
||||
esac
|
||||
echo ${arch}
|
||||
}
|
||||
uname_os_check() {
|
||||
os=$(uname_os)
|
||||
case "$os" in
|
||||
darwin) return 0 ;;
|
||||
dragonfly) return 0 ;;
|
||||
freebsd) return 0 ;;
|
||||
linux) return 0 ;;
|
||||
android) return 0 ;;
|
||||
nacl) return 0 ;;
|
||||
netbsd) return 0 ;;
|
||||
openbsd) return 0 ;;
|
||||
plan9) return 0 ;;
|
||||
solaris) return 0 ;;
|
||||
windows) return 0 ;;
|
||||
esac
|
||||
echo "$0: uname_os_check: internal error '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
|
||||
return 1
|
||||
}
|
||||
uname_arch_check() {
|
||||
arch=$(uname_arch)
|
||||
case "$arch" in
|
||||
386) return 0 ;;
|
||||
amd64) return 0 ;;
|
||||
arm64) return 0 ;;
|
||||
armv5) return 0 ;;
|
||||
armv6) return 0 ;;
|
||||
armv7) return 0 ;;
|
||||
ppc64) return 0 ;;
|
||||
ppc64le) return 0 ;;
|
||||
mips) return 0 ;;
|
||||
mipsle) return 0 ;;
|
||||
mips64) return 0 ;;
|
||||
mips64le) return 0 ;;
|
||||
s390x) return 0 ;;
|
||||
amd64p32) return 0 ;;
|
||||
esac
|
||||
echo "$0: uname_arch_check: internal error '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
|
||||
return 1
|
||||
}
|
||||
# untar ARCHIVE
# Extracts ARCHIVE into the current directory, dispatching on the file
# extension (.tar.gz/.tgz, .tar, .zip).  Returns 1 on unknown formats.
untar() {
  tarball=$1
  case "${tarball}" in
    *.tar.gz | *.tgz) tar -xzf "${tarball}" ;;
    *.tar) tar -xf "${tarball}" ;;
    *.zip) unzip "${tarball}" ;;
    *)
      echo "Unknown archive format for ${tarball}"
      return 1
      ;;
  esac
}
|
||||
mktmpdir() {
|
||||
test -z "$TMPDIR" && TMPDIR="$(mktemp -d)"
|
||||
mkdir -p "${TMPDIR}"
|
||||
echo "${TMPDIR}"
|
||||
}
|
||||
# http_download LOCAL_FILE SOURCE_URL [HEADER]
# Downloads SOURCE_URL to LOCAL_FILE, preferring curl and falling back
# to wget.  The optional third argument is sent as an extra HTTP request
# header (used by github_api to pass an auth token).  Returns 1 when
# neither curl nor wget is installed, or on download failure.
http_download() {
  local_file=$1
  source_url=$2
  header=$3
  headerflag=''
  destflag=''
  if is_command curl; then
    # --fail: non-2xx is an error; -sSL: quiet, show errors, follow redirects
    cmd='curl --fail -sSL'
    destflag='-o'
    headerflag='-H'
  elif is_command wget; then
    cmd='wget -q'
    destflag='-O'
    headerflag='--header'
  else
    echo "http_download: unable to find wget or curl"
    return 1
  fi
  if [ -z "$header" ]; then
    $cmd $destflag "$local_file" "$source_url"
  else
    $cmd $headerflag "$header" $destflag "$local_file" "$source_url"
  fi
}
|
||||
github_api() {
|
||||
local_file=$1
|
||||
source_url=$2
|
||||
header=""
|
||||
case "$source_url" in
|
||||
https://api.github.com*)
|
||||
test -z "$GITHUB_TOKEN" || header="Authorization: token $GITHUB_TOKEN"
|
||||
;;
|
||||
esac
|
||||
http_download "$local_file" "$source_url" "$header"
|
||||
}
|
||||
github_last_release() {
|
||||
owner_repo=$1
|
||||
giturl="https://api.github.com/repos/${owner_repo}/releases/latest"
|
||||
html=$(github_api - "$giturl")
|
||||
version=$(echo "$html" | grep -m 1 "\"tag_name\":" | cut -f4 -d'"')
|
||||
test -z "$version" && return 1
|
||||
echo "$version"
|
||||
}
|
||||
# hash_sha256 [FILE]
# Prints the bare hex SHA-256 digest of FILE (default: stdin).  Tries
# gsha256sum, sha256sum, shasum, then openssl so it works across Linux,
# macOS and the BSDs.  Returns 1 if no hashing tool is available or the
# tool fails.
hash_sha256() {
  TARGET=${1:-/dev/stdin}
  if is_command gsha256sum; then
    # gsha256sum/sha256sum print "<hex>  <file>": digest is field 1
    hash=$(gsha256sum "$TARGET") || return 1
    echo "$hash" | cut -d ' ' -f 1
  elif is_command sha256sum; then
    hash=$(sha256sum "$TARGET") || return 1
    echo "$hash" | cut -d ' ' -f 1
  elif is_command shasum; then
    hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1
    echo "$hash" | cut -d ' ' -f 1
  elif is_command openssl; then
    # BUG FIX: was "openssl -dst openssl dgst -sha256" (invalid
    # arguments) and "cut -d ' ' -f a" (invalid field list).
    # "openssl dgst -sha256 FILE" prints "SHA256(FILE)= <hex>",
    # so the digest is field 2.
    hash=$(openssl dgst -sha256 "$TARGET") || return 1
    echo "$hash" | cut -d ' ' -f 2
  else
    echo "hash_sha256: unable to find command to compute sha-256 hash"
    return 1
  fi
}
|
||||
# hash_sha256_verify FILE CHECKSUM_FILE
# Looks up FILE's expected digest (matched by basename) in CHECKSUM_FILE
# and compares it against the actual SHA-256 computed by hash_sha256.
# Returns 1 if the checksum file is missing, has no matching entry, or
# the digests differ.
hash_sha256_verify() {
  TARGET=$1
  checksums=$2
  if [ -z "$checksums" ]; then
    echo "hash_sha256_verify: checksum file not specified in arg2"
    return 1
  fi
  # checksum files list "<hex>  <filename>"; normalize tabs and take field 1
  BASENAME=${TARGET##*/}
  want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
  if [ -z "$want" ]; then
    echo "hash_sha256_verify: unable to find checksum for '${TARGET}' in '${checksums}'"
    return 1
  fi
  got=$(hash_sha256 "$TARGET")
  if [ "$want" != "$got" ]; then
    echo "hash_sha256_verify: checksum for '$TARGET' did not verify ${want} vs $got"
    return 1
  fi
}
|
||||
cat /dev/null <<EOF
|
||||
------------------------------------------------------------------------
|
||||
End of functions from https://github.com/client9/shlib
|
||||
------------------------------------------------------------------------
|
||||
EOF
|
||||
|
||||
OWNER=client9
|
||||
REPO=misspell
|
||||
BINARY=misspell
|
||||
FORMAT=tar.gz
|
||||
OS=$(uname_os)
|
||||
ARCH=$(uname_arch)
|
||||
PREFIX="$OWNER/$REPO"
|
||||
PLATFORM="${OS}/${ARCH}"
|
||||
GITHUB_DOWNLOAD=https://github.com/${OWNER}/${REPO}/releases/download
|
||||
|
||||
uname_os_check "$OS"
|
||||
uname_arch_check "$ARCH"
|
||||
|
||||
parse_args "$@"
|
||||
|
||||
check_platform
|
||||
|
||||
adjust_version
|
||||
|
||||
adjust_format
|
||||
|
||||
adjust_os
|
||||
|
||||
adjust_arch
|
||||
|
||||
echo "$PREFIX: found version ${VERSION} for ${OS}/${ARCH}"
|
||||
|
||||
NAME=${BINARY}_${VERSION}_${OS}_${ARCH}
|
||||
TARBALL=${NAME}.${FORMAT}
|
||||
TARBALL_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${TARBALL}
|
||||
CHECKSUM=${REPO}_checksums.txt
|
||||
CHECKSUM_URL=${GITHUB_DOWNLOAD}/v${VERSION}/${CHECKSUM}
|
||||
|
||||
# Adjust binary name if windows
|
||||
if [ "$OS" = "windows" ]; then
|
||||
BINARY="${BINARY}.exe"
|
||||
fi
|
||||
|
||||
execute
|
|
@ -1,3 +1,4 @@
|
|||
// Package misspell corrects commonly misspelled English words in source files.
|
||||
package misspell
|
||||
|
||||
// Legal provides licensing info.
|
||||
|
|
|
@ -1,39 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestIsBinaryFile checks that isBinaryFilename classifies paths by
// extension: known binary extensions match case-insensitively
// (".png" and ".PNG" both count), while extension-less files and text
// extensions are treated as non-binary.
func TestIsBinaryFile(t *testing.T) {
	cases := []struct {
		path string
		want bool
	}{
		{"foo.png", true},
		{"foo.PNG", true}, // extension match must be case-insensitive
		{"README", false}, // no extension at all
		{"foo.txt", false},
	}

	for num, tt := range cases {
		if isBinaryFilename(tt.path) != tt.want {
			t.Errorf("Case %d: %s was not %v", num, tt.path, tt.want)
		}
	}
}
|
||||
|
||||
// TestIsSCMPath checks that isSCMPath flags paths that live inside
// source-control metadata directories (e.g. a ".git" component) and
// leaves ordinary paths alone.
func TestIsSCMPath(t *testing.T) {
	cases := []struct {
		path string
		want bool
	}{
		{"foo.png", false},
		{"foo/.git/whatever", true}, // anything under .git is SCM metadata
	}

	for num, tt := range cases {
		if isSCMPath(tt.path) != tt.want {
			t.Errorf("Case %d: %s was not %v", num, tt.path, tt.want)
		}
	}
}
|
|
@ -1,27 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestNotWords verifies that RemoveNotWords blanks out spans that are
// not English words -- file paths, email addresses, hostnames, call
// expressions and escape sequences -- so the spell-checker never
// inspects them.  Removed spans are replaced with spaces, preserving
// the surrounding text.
func TestNotWords(t *testing.T) {
	cases := []struct {
		word string
		want string
	}{
		{" /foo/bar abc", " abc"},
		// a path token must start at a boundary; "X/foo/bar" stays
		{"X/foo/bar abc", "X/foo/bar abc"},
		{"[/foo/bar] abc", "[ ] abc"},
		{"/", "/"}, // a lone slash is not a path
		{"x nickg@client9.xxx y", "x y"},
		{"x infinitie.net y", "x y"},
		{"(s.svc.GetObject(", "( ("},
		// escape sequence glued to a word: see issue #93
		{"\\nto", " to"},
	}
	for pos, tt := range cases {
		got := RemoveNotWords(tt.word)
		if got != tt.want {
			t.Errorf("%d want %q got %q", pos, tt.want, got)
		}
	}
}
|
|
@ -1,119 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// TestReplaceIgnore verifies that misspelling rules removed via
// RemoveRule are really disabled: text containing the ignored
// misspellings ("knwo", "gae") must pass through Replace unchanged.
func TestReplaceIgnore(t *testing.T) {
	cases := []struct {
		ignore string // comma-separated list of rules to drop
		text   string // input that would otherwise be "corrected"
	}{
		{"knwo,gae", "https://github.com/Unknwon, github.com/hnakamur/gaesessions"},
	}
	for line, tt := range cases {
		r := New()
		r.RemoveRule(strings.Split(tt.ignore, ","))
		// RemoveRule requires an explicit recompile before use
		r.Compile()
		got, _ := r.Replace(tt.text)
		if got != tt.text {
			t.Errorf("%d: Replace files want %q got %q", line, tt.text, got)
		}
	}
}
|
||||
|
||||
func TestReplaceLocale(t *testing.T) {
|
||||
cases := []struct {
|
||||
orig string
|
||||
want string
|
||||
}{
|
||||
{"The colours are pretty", "The colors are pretty"},
|
||||
{"summaries", "summaries"},
|
||||
}
|
||||
|
||||
r := New()
|
||||
r.AddRuleList(DictAmerican)
|
||||
r.Compile()
|
||||
for line, tt := range cases {
|
||||
got, _ := r.Replace(tt.orig)
|
||||
if got != tt.want {
|
||||
t.Errorf("%d: ReplaceLocale want %q got %q", line, tt.orig, got)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// TestReplace spot-checks the default dictionary through a fresh
// Replacer: common misspellings are corrected, original capitalization
// is preserved, and correctly spelled text passes through unchanged.
func TestReplace(t *testing.T) {
	cases := []struct {
		orig string
		want string
	}{
		{"I live in Amercia", "I live in America"},
		{"grill brocoli now", "grill broccoli now"},
		{"There is a zeebra", "There is a zebra"},
		{"foo other bar", "foo other bar"}, // nothing misspelled: unchanged
		{"ten fiels", "ten fields"},
		{"Closeing Time", "Closing Time"}, // leading capital preserved
		{"closeing Time", "closing Time"},
		{" TOOD: foobar", " TODO: foobar"}, // all-caps form preserved
		{" preceed ", " precede "},
		{"preceeding", "preceding"},
		{"functionallity", "functionality"},
	}
	r := New()
	for line, tt := range cases {
		got, _ := r.Replace(tt.orig)
		if got != tt.want {
			t.Errorf("%d: Replace files want %q got %q", line, tt.orig, got)
		}
	}
}
|
||||
|
||||
func TestCheckReplace(t *testing.T) {
|
||||
r := Replacer{
|
||||
engine: NewStringReplacer("foo", "foobar", "runing", "running"),
|
||||
corrected: map[string]string{
|
||||
"foo": "foobar",
|
||||
"runing": "running",
|
||||
},
|
||||
}
|
||||
|
||||
s := "nothing at all"
|
||||
news, diffs := r.Replace(s)
|
||||
if s != news || len(diffs) != 0 {
|
||||
t.Errorf("Basic recheck failed: %q vs %q", s, news)
|
||||
}
|
||||
|
||||
//
|
||||
// Test single, correct,.Correctedacements
|
||||
//
|
||||
s = "foo"
|
||||
news, diffs = r.Replace(s)
|
||||
if news != "foobar" || len(diffs) != 1 || diffs[0].Original != "foo" && diffs[0].Corrected != "foobar" && diffs[0].Column != 0 {
|
||||
t.Errorf("basic recheck1 failed %q vs %q", s, news)
|
||||
}
|
||||
s = "foo junk"
|
||||
news, diffs = r.Replace(s)
|
||||
if news != "foobar junk" || len(diffs) != 1 || diffs[0].Original != "foo" && diffs[0].Corrected != "foobar" && diffs[0].Column != 0 {
|
||||
t.Errorf("basic recheck2 failed %q vs %q, %v", s, news, diffs[0])
|
||||
}
|
||||
|
||||
s = "junk foo"
|
||||
news, diffs = r.Replace(s)
|
||||
if news != "junk foobar" || len(diffs) != 1 || diffs[0].Original != "foo" && diffs[0].Corrected != "foobar" && diffs[0].Column != 5 {
|
||||
t.Errorf("basic recheck3 failed: %q vs %q", s, news)
|
||||
}
|
||||
|
||||
s = "junk foo junk"
|
||||
news, diffs = r.Replace(s)
|
||||
if news != "junk foobar junk" || len(diffs) != 1 || diffs[0].Original != "foo" && diffs[0].Corrected != "foobar" && diffs[0].Column != 5 {
|
||||
t.Errorf("basic recheck4 failed: %q vs %q", s, news)
|
||||
}
|
||||
|
||||
// Incorrect.Correctedacements
|
||||
s = "food pruning"
|
||||
news, _ = r.Replace(s)
|
||||
if news != s {
|
||||
t.Errorf("incorrect.Correctedacement failed: %q vs %q", s, news)
|
||||
}
|
||||
}
|
|
@ -1,2 +0,0 @@
|
|||
#!/bin/sh -ex
|
||||
misspell -error "$1"
|
|
@ -1,3 +0,0 @@
|
|||
#!/bin/sh -e
|
||||
# autorelease based on tag
|
||||
test -n "$TRAVIS_TAG" && curl -sL https://git.io/goreleaser | bash
|
|
@ -1,2 +0,0 @@
|
|||
#!/bin/sh -ex
|
||||
make ci
|
|
@ -1,9 +0,0 @@
|
|||
#!/bin/sh -ex
|
||||
#
|
||||
# This updates the 'godownloader-*.sh' scripts from upstream
|
||||
# This is done manually
|
||||
#
|
||||
SOURCE=https://raw.githubusercontent.com/goreleaser/godownloader/master/samples
|
||||
curl --fail -o godownloader-misspell.sh "$SOURCE/godownloader-misspell.sh"
|
||||
chmod a+x godownloader-misspell.sh
|
||||
|
|
@ -1,421 +0,0 @@
|
|||
// Copyright 2009 The Go Authors. All rights reserved.
|
||||
// Use of this source code is governed by a BSD-style
|
||||
// license that can be found in the LICENSE file.
|
||||
|
||||
package misspell_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
. "github.com/client9/misspell"
|
||||
)
|
||||
|
||||
var htmlEscaper = NewStringReplacer(
|
||||
"&", "&",
|
||||
"<", "<",
|
||||
">", ">",
|
||||
`"`, """,
|
||||
"'", "'",
|
||||
)
|
||||
|
||||
var htmlUnescaper = NewStringReplacer(
|
||||
"&", "&",
|
||||
"<", "<",
|
||||
">", ">",
|
||||
""", `"`,
|
||||
"'", "'",
|
||||
)
|
||||
|
||||
// The http package's old HTML escaping function.
|
||||
func oldHTMLEscape(s string) string {
|
||||
s = strings.Replace(s, "&", "&", -1)
|
||||
s = strings.Replace(s, "<", "<", -1)
|
||||
s = strings.Replace(s, ">", ">", -1)
|
||||
s = strings.Replace(s, `"`, """, -1)
|
||||
s = strings.Replace(s, "'", "'", -1)
|
||||
return s
|
||||
}
|
||||
|
||||
var capitalLetters = NewStringReplacer("a", "A", "b", "B")
|
||||
|
||||
// TestReplacer tests the replacer implementations.
|
||||
func TestReplacer(t *testing.T) {
|
||||
type testCase struct {
|
||||
r *StringReplacer
|
||||
in, out string
|
||||
}
|
||||
var testCases []testCase
|
||||
|
||||
// str converts 0xff to "\xff". This isn't just string(b) since that converts to UTF-8.
|
||||
str := func(b byte) string {
|
||||
return string([]byte{b})
|
||||
}
|
||||
var s []string
|
||||
|
||||
// inc maps "\x00"->"\x01", ..., "a"->"b", "b"->"c", ..., "\xff"->"\x00".
|
||||
for i := 0; i < 256; i++ {
|
||||
s = append(s, str(byte(i)), str(byte(i+1)))
|
||||
}
|
||||
inc := NewStringReplacer(s...)
|
||||
|
||||
// Test cases with 1-byte old strings, 1-byte new strings.
|
||||
testCases = append(testCases,
|
||||
testCase{capitalLetters, "brad", "BrAd"},
|
||||
testCase{capitalLetters, strings.Repeat("a", (32<<10)+123), strings.Repeat("A", (32<<10)+123)},
|
||||
testCase{capitalLetters, "", ""},
|
||||
|
||||
testCase{inc, "brad", "csbe"},
|
||||
testCase{inc, "\x00\xff", "\x01\x00"},
|
||||
testCase{inc, "", ""},
|
||||
|
||||
testCase{NewStringReplacer("a", "1", "a", "2"), "brad", "br1d"},
|
||||
)
|
||||
|
||||
// repeat maps "a"->"a", "b"->"bb", "c"->"ccc", ...
|
||||
s = nil
|
||||
for i := 0; i < 256; i++ {
|
||||
n := i + 1 - 'a'
|
||||
if n < 1 {
|
||||
n = 1
|
||||
}
|
||||
s = append(s, str(byte(i)), strings.Repeat(str(byte(i)), n))
|
||||
}
|
||||
repeat := NewStringReplacer(s...)
|
||||
|
||||
// Test cases with 1-byte old strings, variable length new strings.
|
||||
testCases = append(testCases,
|
||||
testCase{htmlEscaper, "No changes", "No changes"},
|
||||
testCase{htmlEscaper, "I <3 escaping & stuff", "I <3 escaping & stuff"},
|
||||
testCase{htmlEscaper, "&&&", "&&&"},
|
||||
testCase{htmlEscaper, "", ""},
|
||||
|
||||
testCase{repeat, "brad", "bbrrrrrrrrrrrrrrrrrradddd"},
|
||||
testCase{repeat, "abba", "abbbba"},
|
||||
testCase{repeat, "", ""},
|
||||
|
||||
testCase{NewStringReplacer("a", "11", "a", "22"), "brad", "br11d"},
|
||||
)
|
||||
|
||||
// The remaining test cases have variable length old strings.
|
||||
|
||||
testCases = append(testCases,
|
||||
testCase{htmlUnescaper, "&amp;", "&"},
|
||||
testCase{htmlUnescaper, "<b>HTML's neat</b>", "<b>HTML's neat</b>"},
|
||||
testCase{htmlUnescaper, "", ""},
|
||||
|
||||
testCase{NewStringReplacer("a", "1", "a", "2", "xxx", "xxx"), "brad", "br1d"},
|
||||
|
||||
testCase{NewStringReplacer("a", "1", "aa", "2", "aaa", "3"), "aaaa", "1111"},
|
||||
|
||||
testCase{NewStringReplacer("aaa", "3", "aa", "2", "a", "1"), "aaaa", "31"},
|
||||
)
|
||||
|
||||
// gen1 has multiple old strings of variable length. There is no
|
||||
// overall non-empty common prefix, but some pairwise common prefixes.
|
||||
gen1 := NewStringReplacer(
|
||||
"aaa", "3[aaa]",
|
||||
"aa", "2[aa]",
|
||||
"a", "1[a]",
|
||||
"i", "i",
|
||||
"longerst", "most long",
|
||||
"longer", "medium",
|
||||
"long", "short",
|
||||
"xx", "xx",
|
||||
"x", "X",
|
||||
"X", "Y",
|
||||
"Y", "Z",
|
||||
)
|
||||
testCases = append(testCases,
|
||||
testCase{gen1, "fooaaabar", "foo3[aaa]b1[a]r"},
|
||||
testCase{gen1, "long, longerst, longer", "short, most long, medium"},
|
||||
testCase{gen1, "xxxxx", "xxxxX"},
|
||||
testCase{gen1, "XiX", "YiY"},
|
||||
testCase{gen1, "", ""},
|
||||
)
|
||||
|
||||
// gen2 has multiple old strings with no pairwise common prefix.
|
||||
gen2 := NewStringReplacer(
|
||||
"roses", "red",
|
||||
"violets", "blue",
|
||||
"sugar", "sweet",
|
||||
)
|
||||
testCases = append(testCases,
|
||||
testCase{gen2, "roses are red, violets are blue...", "red are red, blue are blue..."},
|
||||
testCase{gen2, "", ""},
|
||||
)
|
||||
|
||||
// gen3 has multiple old strings with an overall common prefix.
|
||||
gen3 := NewStringReplacer(
|
||||
"abracadabra", "poof",
|
||||
"abracadabrakazam", "splat",
|
||||
"abraham", "lincoln",
|
||||
"abrasion", "scrape",
|
||||
"abraham", "isaac",
|
||||
)
|
||||
testCases = append(testCases,
|
||||
testCase{gen3, "abracadabrakazam abraham", "poofkazam lincoln"},
|
||||
testCase{gen3, "abrasion abracad", "scrape abracad"},
|
||||
testCase{gen3, "abba abram abrasive", "abba abram abrasive"},
|
||||
testCase{gen3, "", ""},
|
||||
)
|
||||
|
||||
// foo{1,2,3,4} have multiple old strings with an overall common prefix
|
||||
// and 1- or 2- byte extensions from the common prefix.
|
||||
foo1 := NewStringReplacer(
|
||||
"foo1", "A",
|
||||
"foo2", "B",
|
||||
"foo3", "C",
|
||||
)
|
||||
foo2 := NewStringReplacer(
|
||||
"foo1", "A",
|
||||
"foo2", "B",
|
||||
"foo31", "C",
|
||||
"foo32", "D",
|
||||
)
|
||||
foo3 := NewStringReplacer(
|
||||
"foo11", "A",
|
||||
"foo12", "B",
|
||||
"foo31", "C",
|
||||
"foo32", "D",
|
||||
)
|
||||
foo4 := NewStringReplacer(
|
||||
"foo12", "B",
|
||||
"foo32", "D",
|
||||
)
|
||||
testCases = append(testCases,
|
||||
testCase{foo1, "fofoofoo12foo32oo", "fofooA2C2oo"},
|
||||
testCase{foo1, "", ""},
|
||||
|
||||
testCase{foo2, "fofoofoo12foo32oo", "fofooA2Doo"},
|
||||
testCase{foo2, "", ""},
|
||||
|
||||
testCase{foo3, "fofoofoo12foo32oo", "fofooBDoo"},
|
||||
testCase{foo3, "", ""},
|
||||
|
||||
testCase{foo4, "fofoofoo12foo32oo", "fofooBDoo"},
|
||||
testCase{foo4, "", ""},
|
||||
)
|
||||
|
||||
// genAll maps "\x00\x01\x02...\xfe\xff" to "[all]", amongst other things.
|
||||
allBytes := make([]byte, 256)
|
||||
for i := range allBytes {
|
||||
allBytes[i] = byte(i)
|
||||
}
|
||||
allString := string(allBytes)
|
||||
genAll := NewStringReplacer(
|
||||
allString, "[all]",
|
||||
"\xff", "[ff]",
|
||||
"\x00", "[00]",
|
||||
)
|
||||
testCases = append(testCases,
|
||||
testCase{genAll, allString, "[all]"},
|
||||
testCase{genAll, "a\xff" + allString + "\x00", "a[ff][all][00]"},
|
||||
testCase{genAll, "", ""},
|
||||
)
|
||||
|
||||
// Test cases with empty old strings.
|
||||
|
||||
blankToX1 := NewStringReplacer("", "X")
|
||||
blankToX2 := NewStringReplacer("", "X", "", "")
|
||||
blankHighPriority := NewStringReplacer("", "X", "o", "O")
|
||||
blankLowPriority := NewStringReplacer("o", "O", "", "X")
|
||||
blankNoOp1 := NewStringReplacer("", "")
|
||||
blankNoOp2 := NewStringReplacer("", "", "", "A")
|
||||
blankFoo := NewStringReplacer("", "X", "foobar", "R", "foobaz", "Z")
|
||||
testCases = append(testCases,
|
||||
testCase{blankToX1, "foo", "XfXoXoX"},
|
||||
testCase{blankToX1, "", "X"},
|
||||
|
||||
testCase{blankToX2, "foo", "XfXoXoX"},
|
||||
testCase{blankToX2, "", "X"},
|
||||
|
||||
testCase{blankHighPriority, "oo", "XOXOX"},
|
||||
testCase{blankHighPriority, "ii", "XiXiX"},
|
||||
testCase{blankHighPriority, "oiio", "XOXiXiXOX"},
|
||||
testCase{blankHighPriority, "iooi", "XiXOXOXiX"},
|
||||
testCase{blankHighPriority, "", "X"},
|
||||
|
||||
testCase{blankLowPriority, "oo", "OOX"},
|
||||
testCase{blankLowPriority, "ii", "XiXiX"},
|
||||
testCase{blankLowPriority, "oiio", "OXiXiOX"},
|
||||
testCase{blankLowPriority, "iooi", "XiOOXiX"},
|
||||
testCase{blankLowPriority, "", "X"},
|
||||
|
||||
testCase{blankNoOp1, "foo", "foo"},
|
||||
testCase{blankNoOp1, "", ""},
|
||||
|
||||
testCase{blankNoOp2, "foo", "foo"},
|
||||
testCase{blankNoOp2, "", ""},
|
||||
|
||||
testCase{blankFoo, "foobarfoobaz", "XRXZX"},
|
||||
testCase{blankFoo, "foobar-foobaz", "XRX-XZX"},
|
||||
testCase{blankFoo, "", "X"},
|
||||
)
|
||||
|
||||
// single string replacer
|
||||
|
||||
abcMatcher := NewStringReplacer("abc", "[match]")
|
||||
|
||||
testCases = append(testCases,
|
||||
testCase{abcMatcher, "", ""},
|
||||
testCase{abcMatcher, "ab", "ab"},
|
||||
testCase{abcMatcher, "abc", "[match]"},
|
||||
testCase{abcMatcher, "abcd", "[match]d"},
|
||||
testCase{abcMatcher, "cabcabcdabca", "c[match][match]d[match]a"},
|
||||
)
|
||||
|
||||
// Issue 6659 cases (more single string replacer)
|
||||
|
||||
noHello := NewStringReplacer("Hello", "")
|
||||
testCases = append(testCases,
|
||||
testCase{noHello, "Hello", ""},
|
||||
testCase{noHello, "Hellox", "x"},
|
||||
testCase{noHello, "xHello", "x"},
|
||||
testCase{noHello, "xHellox", "xx"},
|
||||
)
|
||||
|
||||
// No-arg test cases.
|
||||
|
||||
nop := NewStringReplacer()
|
||||
testCases = append(testCases,
|
||||
testCase{nop, "abc", "abc"},
|
||||
testCase{nop, "", ""},
|
||||
)
|
||||
|
||||
// Run the test cases.
|
||||
|
||||
for i, tc := range testCases {
|
||||
if s := tc.r.Replace(tc.in); s != tc.out {
|
||||
t.Errorf("%d. strings.Replace(%q) = %q, want %q", i, tc.in, s, tc.out)
|
||||
}
|
||||
var buf bytes.Buffer
|
||||
n, err := tc.r.WriteString(&buf, tc.in)
|
||||
if err != nil {
|
||||
t.Errorf("%d. WriteString: %v", i, err)
|
||||
continue
|
||||
}
|
||||
got := buf.String()
|
||||
if got != tc.out {
|
||||
t.Errorf("%d. WriteString(%q) wrote %q, want %q", i, tc.in, got, tc.out)
|
||||
continue
|
||||
}
|
||||
if n != len(tc.out) {
|
||||
t.Errorf("%d. WriteString(%q) wrote correct string but reported %d bytes; want %d (%q)",
|
||||
i, tc.in, n, len(tc.out), tc.out)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type errWriter struct{}
|
||||
|
||||
func (errWriter) Write(p []byte) (n int, err error) {
|
||||
return 0, fmt.Errorf("unwritable")
|
||||
}
|
||||
|
||||
func BenchmarkGenericNoMatch(b *testing.B) {
|
||||
str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
|
||||
generic := NewStringReplacer("a", "A", "b", "B", "12", "123") // varying lengths forces generic
|
||||
for i := 0; i < b.N; i++ {
|
||||
generic.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGenericMatch1(b *testing.B) {
|
||||
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
|
||||
generic := NewStringReplacer("a", "A", "b", "B", "12", "123")
|
||||
for i := 0; i < b.N; i++ {
|
||||
generic.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkGenericMatch2(b *testing.B) {
|
||||
str := strings.Repeat("It's <b>HTML</b>!", 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
htmlUnescaper.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func benchmarkSingleString(b *testing.B, pattern, text string) {
|
||||
r := NewStringReplacer(pattern, "[match]")
|
||||
b.SetBytes(int64(len(text)))
|
||||
b.ResetTimer()
|
||||
for i := 0; i < b.N; i++ {
|
||||
r.Replace(text)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkSingleMaxSkipping(b *testing.B) {
|
||||
benchmarkSingleString(b, strings.Repeat("b", 25), strings.Repeat("a", 10000))
|
||||
}
|
||||
|
||||
func BenchmarkSingleLongSuffixFail(b *testing.B) {
|
||||
benchmarkSingleString(b, "b"+strings.Repeat("a", 500), strings.Repeat("a", 1002))
|
||||
}
|
||||
|
||||
func BenchmarkSingleMatch(b *testing.B) {
|
||||
benchmarkSingleString(b, "abcdef", strings.Repeat("abcdefghijklmno", 1000))
|
||||
}
|
||||
|
||||
func BenchmarkByteByteNoMatch(b *testing.B) {
|
||||
str := strings.Repeat("A", 100) + strings.Repeat("B", 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
capitalLetters.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkByteByteMatch(b *testing.B) {
|
||||
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
capitalLetters.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkByteStringMatch(b *testing.B) {
|
||||
str := "<" + strings.Repeat("a", 99) + strings.Repeat("b", 99) + ">"
|
||||
for i := 0; i < b.N; i++ {
|
||||
htmlEscaper.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHTMLEscapeNew(b *testing.B) {
|
||||
str := "I <3 to escape HTML & other text too."
|
||||
for i := 0; i < b.N; i++ {
|
||||
htmlEscaper.Replace(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkHTMLEscapeOld(b *testing.B) {
|
||||
str := "I <3 to escape HTML & other text too."
|
||||
for i := 0; i < b.N; i++ {
|
||||
oldHTMLEscape(str)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkByteStringReplacerWriteString(b *testing.B) {
|
||||
str := strings.Repeat("I <3 to escape HTML & other text too.", 100)
|
||||
buf := new(bytes.Buffer)
|
||||
for i := 0; i < b.N; i++ {
|
||||
htmlEscaper.WriteString(buf, str)
|
||||
buf.Reset()
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkByteReplacerWriteString(b *testing.B) {
|
||||
str := strings.Repeat("abcdefghijklmnopqrstuvwxyz", 100)
|
||||
buf := new(bytes.Buffer)
|
||||
for i := 0; i < b.N; i++ {
|
||||
capitalLetters.WriteString(buf, str)
|
||||
buf.Reset()
|
||||
}
|
||||
}
|
||||
|
||||
// BenchmarkByteByteReplaces compares byteByteImpl against multiple Replaces.
|
||||
func BenchmarkByteByteReplaces(b *testing.B) {
|
||||
str := strings.Repeat("a", 100) + strings.Repeat("b", 100)
|
||||
for i := 0; i < b.N; i++ {
|
||||
strings.Replace(strings.Replace(str, "a", "A", -1), "b", "B", -1)
|
||||
}
|
||||
}
|
|
@ -1,105 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"strings"
|
||||
"testing"
|
||||
)
|
||||
|
||||
// Test suite partially from https://mathiasbynens.be/demo/url-regex
|
||||
//
|
||||
func TestStripURL(t *testing.T) {
|
||||
cases := []string{
|
||||
"HTTP://FOO.COM/BLAH_BLAH",
|
||||
"http://foo.com/blah_blah",
|
||||
"http://foo.com/blah_blah/",
|
||||
"http://foo.com/blah_blah_(wikipedia)",
|
||||
"http://foo.com/blah_blah_(wikipedia)_(again)",
|
||||
"http://www.example.com/wpstyle/?p=364",
|
||||
"https://www.example.com/foo/?bar=baz&inga=42&quux",
|
||||
"http://✪df.ws/123",
|
||||
"http://userid:password@example.com:8080",
|
||||
"http://userid:password@example.com:8080/",
|
||||
"http://userid@example.com",
|
||||
"http://userid@example.com/",
|
||||
"http://userid@example.com:8080",
|
||||
"http://userid@example.com:8080/",
|
||||
"http://userid:password@example.com",
|
||||
"http://userid:password@example.com/",
|
||||
"http://142.42.1.1/",
|
||||
"http://142.42.1.1:8080/",
|
||||
"http://➡.ws/䨹",
|
||||
"http://⌘.ws",
|
||||
"http://⌘.ws/",
|
||||
"http://foo.com/blah_(wikipedia)#cite-1",
|
||||
"http://foo.com/blah_(wikipedia)_blah#cite-1",
|
||||
"http://foo.com/unicode_(✪)_in_parens",
|
||||
"http://foo.com/(something)?after=parens",
|
||||
"http://☺.damowmow.com/a",
|
||||
"http://code.google.com/events/#&product=browser",
|
||||
"http://j.mp",
|
||||
"ftp://foo.bar/baz",
|
||||
"http://foo.bar/?q=Test%20URL-encoded%20stuff",
|
||||
"http://مثال.إختبار",
|
||||
"http://例子.测试",
|
||||
"http://उदाहरण.परीक्षा",
|
||||
"http://-.~_!$&'()*+,;=:%40:80%2f::::::@example.com",
|
||||
"http://1337.net",
|
||||
"http://a.b-c.de",
|
||||
"http://223.255.255.254",
|
||||
}
|
||||
|
||||
for num, tt := range cases {
|
||||
got := strings.TrimSpace(StripURL(tt))
|
||||
if len(got) != 0 {
|
||||
t.Errorf("case %d: unable to match %q", num, tt)
|
||||
}
|
||||
}
|
||||
|
||||
cases = []string{
|
||||
"http://",
|
||||
"http://.",
|
||||
"http://..",
|
||||
"http://../",
|
||||
"http://?",
|
||||
"http://??",
|
||||
"http://??/",
|
||||
"http://#",
|
||||
"http://##",
|
||||
"http://##/",
|
||||
"http://foo.bar?q=Spaces should be encoded",
|
||||
"//",
|
||||
"//a",
|
||||
"///a",
|
||||
"///",
|
||||
"http:///a",
|
||||
"foo.com",
|
||||
"rdar://1234",
|
||||
"h://test",
|
||||
"http:// shouldfail.com",
|
||||
":// should fail",
|
||||
"http://foo.bar/foo(bar)baz quux",
|
||||
"ftps://foo.bar/",
|
||||
//"http://-error-.invalid/",
|
||||
//"http://a.b--c.de/",
|
||||
//"http://-a.b.co",
|
||||
//"http://a.b-.co",
|
||||
//"http://0.0.0.0",
|
||||
//"http://10.1.1.0",
|
||||
//"http://10.1.1.255",
|
||||
//"http://224.1.1.1",
|
||||
//"http://1.1.1.1.1",
|
||||
//"http://123.123.123",
|
||||
//"http://3628126748",
|
||||
"http://.www.foo.bar/",
|
||||
//"http://www.foo.bar./",
|
||||
"http://.www.foo.bar./",
|
||||
//"http://10.1.1.1",
|
||||
}
|
||||
|
||||
for num, tt := range cases {
|
||||
got := strings.TrimSpace(StripURL(tt))
|
||||
if len(got) == 0 {
|
||||
t.Errorf("case %d: incorrect match %q", num, tt)
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,35 +0,0 @@
|
|||
package misspell
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"testing"
|
||||
)
|
||||
|
||||
type sortByLen []string
|
||||
|
||||
func (a sortByLen) Len() int { return len(a) }
|
||||
func (a sortByLen) Swap(i, j int) { a[i], a[j] = a[j], a[i] }
|
||||
func (a sortByLen) Less(i, j int) bool {
|
||||
if len(a[i]) == len(a[j]) {
|
||||
// if words are same size, then use
|
||||
// normal alphabetical order
|
||||
return a[i] < a[j]
|
||||
}
|
||||
// INVERTED -- biggest words first
|
||||
return len(a[i]) > len(a[j])
|
||||
}
|
||||
|
||||
func TestWordSort(t *testing.T) {
|
||||
if len(DictMain)%2 == 1 {
|
||||
t.Errorf("Dictionary is a not a multiple of 2")
|
||||
}
|
||||
words := make([]string, 0, len(DictMain)/2)
|
||||
for i := 0; i < len(DictMain); i += 2 {
|
||||
words = append(words, DictMain[i])
|
||||
}
|
||||
if !sort.IsSorted(sortByLen(words)) {
|
||||
t.Errorf("Words not sorted by len, by alpha!")
|
||||
t.Errorf("Words.go is autogenerated -- do not edit.")
|
||||
t.Errorf("File issue instead.")
|
||||
}
|
||||
}
|
|
@ -1,9 +0,0 @@
|
|||
language: go
|
||||
|
||||
go:
|
||||
- 1.4
|
||||
- 1.5
|
||||
- 1.6
|
||||
- tip
|
||||
|
||||
go_import_path: gopkg.in/yaml.v2
|
|
@ -0,0 +1,13 @@
|
|||
Copyright 2011-2016 Canonical Ltd.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
|
@ -1,133 +0,0 @@
|
|||
# YAML support for the Go language
|
||||
|
||||
Introduction
|
||||
------------
|
||||
|
||||
The yaml package enables Go programs to comfortably encode and decode YAML
|
||||
values. It was developed within [Canonical](https://www.canonical.com) as
|
||||
part of the [juju](https://juju.ubuntu.com) project, and is based on a
|
||||
pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML)
|
||||
C library to parse and generate YAML data quickly and reliably.
|
||||
|
||||
Compatibility
|
||||
-------------
|
||||
|
||||
The yaml package supports most of YAML 1.1 and 1.2, including support for
|
||||
anchors, tags, map merging, etc. Multi-document unmarshalling is not yet
|
||||
implemented, and base-60 floats from YAML 1.1 are purposefully not
|
||||
supported since they're a poor design and are gone in YAML 1.2.
|
||||
|
||||
Installation and usage
|
||||
----------------------
|
||||
|
||||
The import path for the package is *gopkg.in/yaml.v2*.
|
||||
|
||||
To install it, run:
|
||||
|
||||
go get gopkg.in/yaml.v2
|
||||
|
||||
API documentation
|
||||
-----------------
|
||||
|
||||
If opened in a browser, the import path itself leads to the API documentation:
|
||||
|
||||
* [https://gopkg.in/yaml.v2](https://gopkg.in/yaml.v2)
|
||||
|
||||
API stability
|
||||
-------------
|
||||
|
||||
The package API for yaml v2 will remain stable as described in [gopkg.in](https://gopkg.in).
|
||||
|
||||
|
||||
License
|
||||
-------
|
||||
|
||||
The yaml package is licensed under the Apache License 2.0. Please see the LICENSE file for details.
|
||||
|
||||
|
||||
Example
|
||||
-------
|
||||
|
||||
Some more examples can be found in the "examples" folder.
|
||||
|
||||
```Go
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var data = `
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
`
|
||||
|
||||
type T struct {
|
||||
A string
|
||||
B struct {
|
||||
RenamedC int `yaml:"c"`
|
||||
D []int `yaml:",flow"`
|
||||
}
|
||||
}
|
||||
|
||||
func main() {
|
||||
t := T{}
|
||||
|
||||
err := yaml.Unmarshal([]byte(data), &t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t:\n%v\n\n", t)
|
||||
|
||||
d, err := yaml.Marshal(&t)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- t dump:\n%s\n\n", string(d))
|
||||
|
||||
m := make(map[interface{}]interface{})
|
||||
|
||||
err = yaml.Unmarshal([]byte(data), &m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m:\n%v\n\n", m)
|
||||
|
||||
d, err = yaml.Marshal(&m)
|
||||
if err != nil {
|
||||
log.Fatalf("error: %v", err)
|
||||
}
|
||||
fmt.Printf("--- m dump:\n%s\n\n", string(d))
|
||||
}
|
||||
```
|
||||
|
||||
This example will generate the following output:
|
||||
|
||||
```
|
||||
--- t:
|
||||
{Easy! {2 [3 4]}}
|
||||
|
||||
--- t dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d: [3, 4]
|
||||
|
||||
|
||||
--- m:
|
||||
map[a:Easy! b:map[c:2 d:[3 4]]]
|
||||
|
||||
--- m dump:
|
||||
a: Easy!
|
||||
b:
|
||||
c: 2
|
||||
d:
|
||||
- 3
|
||||
- 4
|
||||
```
|
||||
|
|
@ -2,7 +2,6 @@ package yaml
|
|||
|
||||
import (
|
||||
"io"
|
||||
"os"
|
||||
)
|
||||
|
||||
func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) {
|
||||
|
@ -48,9 +47,9 @@ func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err
|
|||
return n, nil
|
||||
}
|
||||
|
||||
// File read handler.
|
||||
func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
return parser.input_file.Read(buffer)
|
||||
// Reader read handler.
|
||||
func yaml_reader_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) {
|
||||
return parser.input_reader.Read(buffer)
|
||||
}
|
||||
|
||||
// Set a string input.
|
||||
|
@ -64,12 +63,12 @@ func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) {
|
|||
}
|
||||
|
||||
// Set a file input.
|
||||
func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) {
|
||||
func yaml_parser_set_input_reader(parser *yaml_parser_t, r io.Reader) {
|
||||
if parser.read_handler != nil {
|
||||
panic("must set the input source only once")
|
||||
}
|
||||
parser.read_handler = yaml_file_read_handler
|
||||
parser.input_file = file
|
||||
parser.read_handler = yaml_reader_read_handler
|
||||
parser.input_reader = r
|
||||
}
|
||||
|
||||
// Set the source encoding.
|
||||
|
@ -81,14 +80,13 @@ func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) {
|
|||
}
|
||||
|
||||
// Create a new emitter object.
|
||||
func yaml_emitter_initialize(emitter *yaml_emitter_t) bool {
|
||||
func yaml_emitter_initialize(emitter *yaml_emitter_t) {
|
||||
*emitter = yaml_emitter_t{
|
||||
buffer: make([]byte, output_buffer_size),
|
||||
raw_buffer: make([]byte, 0, output_raw_buffer_size),
|
||||
states: make([]yaml_emitter_state_t, 0, initial_stack_size),
|
||||
events: make([]yaml_event_t, 0, initial_queue_size),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an emitter object.
|
||||
|
@ -102,9 +100,10 @@ func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
// File write handler.
|
||||
func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
_, err := emitter.output_file.Write(buffer)
|
||||
// yaml_writer_write_handler uses emitter.output_writer to write the
|
||||
// emitted text.
|
||||
func yaml_writer_write_handler(emitter *yaml_emitter_t, buffer []byte) error {
|
||||
_, err := emitter.output_writer.Write(buffer)
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -118,12 +117,12 @@ func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]by
|
|||
}
|
||||
|
||||
// Set a file output.
|
||||
func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) {
|
||||
func yaml_emitter_set_output_writer(emitter *yaml_emitter_t, w io.Writer) {
|
||||
if emitter.write_handler != nil {
|
||||
panic("must set the output target only once")
|
||||
}
|
||||
emitter.write_handler = yaml_file_write_handler
|
||||
emitter.output_file = file
|
||||
emitter.write_handler = yaml_writer_write_handler
|
||||
emitter.output_writer = w
|
||||
}
|
||||
|
||||
// Set the output encoding.
|
||||
|
@ -252,41 +251,41 @@ func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) {
|
|||
//
|
||||
|
||||
// Create STREAM-START.
|
||||
func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool {
|
||||
func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_START_EVENT,
|
||||
encoding: encoding,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create STREAM-END.
|
||||
func yaml_stream_end_event_initialize(event *yaml_event_t) bool {
|
||||
func yaml_stream_end_event_initialize(event *yaml_event_t) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_STREAM_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-START.
|
||||
func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t,
|
||||
tag_directives []yaml_tag_directive_t, implicit bool) bool {
|
||||
func yaml_document_start_event_initialize(
|
||||
event *yaml_event_t,
|
||||
version_directive *yaml_version_directive_t,
|
||||
tag_directives []yaml_tag_directive_t,
|
||||
implicit bool,
|
||||
) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_START_EVENT,
|
||||
version_directive: version_directive,
|
||||
tag_directives: tag_directives,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create DOCUMENT-END.
|
||||
func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool {
|
||||
func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_DOCUMENT_END_EVENT,
|
||||
implicit: implicit,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
///*
|
||||
|
@ -348,7 +347,7 @@ func yaml_sequence_end_event_initialize(event *yaml_event_t) bool {
|
|||
}
|
||||
|
||||
// Create MAPPING-START.
|
||||
func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool {
|
||||
func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_START_EVENT,
|
||||
anchor: anchor,
|
||||
|
@ -356,15 +355,13 @@ func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte
|
|||
implicit: implicit,
|
||||
style: yaml_style_t(style),
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Create MAPPING-END.
|
||||
func yaml_mapping_end_event_initialize(event *yaml_event_t) bool {
|
||||
func yaml_mapping_end_event_initialize(event *yaml_event_t) {
|
||||
*event = yaml_event_t{
|
||||
typ: yaml_MAPPING_END_EVENT,
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
// Destroy an event object.
|
||||
|
@ -471,7 +468,7 @@ func yaml_event_delete(event *yaml_event_t) {
|
|||
// } context
|
||||
// tag_directive *yaml_tag_directive_t
|
||||
//
|
||||
// context.error = YAML_NO_ERROR // Eliminate a compliler warning.
|
||||
// context.error = YAML_NO_ERROR // Eliminate a compiler warning.
|
||||
//
|
||||
// assert(document) // Non-NULL document object is expected.
|
||||
//
|
||||
|
|
|
@ -4,6 +4,7 @@ import (
|
|||
"encoding"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
|
@ -22,19 +23,22 @@ type node struct {
|
|||
kind int
|
||||
line, column int
|
||||
tag string
|
||||
value string
|
||||
implicit bool
|
||||
children []*node
|
||||
anchors map[string]*node
|
||||
// For an alias node, alias holds the resolved alias.
|
||||
alias *node
|
||||
value string
|
||||
implicit bool
|
||||
children []*node
|
||||
anchors map[string]*node
|
||||
}
|
||||
|
||||
// ----------------------------------------------------------------------------
|
||||
// Parser, produces a node tree out of a libyaml event stream.
|
||||
|
||||
type parser struct {
|
||||
parser yaml_parser_t
|
||||
event yaml_event_t
|
||||
doc *node
|
||||
parser yaml_parser_t
|
||||
event yaml_event_t
|
||||
doc *node
|
||||
doneInit bool
|
||||
}
|
||||
|
||||
func newParser(b []byte) *parser {
|
||||
|
@ -42,21 +46,30 @@ func newParser(b []byte) *parser {
|
|||
if !yaml_parser_initialize(&p.parser) {
|
||||
panic("failed to initialize YAML emitter")
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
b = []byte{'\n'}
|
||||
}
|
||||
|
||||
yaml_parser_set_input_string(&p.parser, b)
|
||||
|
||||
p.skip()
|
||||
if p.event.typ != yaml_STREAM_START_EVENT {
|
||||
panic("expected stream start event, got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
return &p
|
||||
}
|
||||
|
||||
func newParserFromReader(r io.Reader) *parser {
|
||||
p := parser{}
|
||||
if !yaml_parser_initialize(&p.parser) {
|
||||
panic("failed to initialize YAML emitter")
|
||||
}
|
||||
yaml_parser_set_input_reader(&p.parser, r)
|
||||
return &p
|
||||
}
|
||||
|
||||
func (p *parser) init() {
|
||||
if p.doneInit {
|
||||
return
|
||||
}
|
||||
p.expect(yaml_STREAM_START_EVENT)
|
||||
p.doneInit = true
|
||||
}
|
||||
|
||||
func (p *parser) destroy() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
yaml_event_delete(&p.event)
|
||||
|
@ -64,16 +77,35 @@ func (p *parser) destroy() {
|
|||
yaml_parser_delete(&p.parser)
|
||||
}
|
||||
|
||||
func (p *parser) skip() {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
if p.event.typ == yaml_STREAM_END_EVENT {
|
||||
failf("attempted to go past the end of stream; corrupted value?")
|
||||
// expect consumes an event from the event stream and
|
||||
// checks that it's of the expected type.
|
||||
func (p *parser) expect(e yaml_event_type_t) {
|
||||
if p.event.typ == yaml_NO_EVENT {
|
||||
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||
p.fail()
|
||||
}
|
||||
yaml_event_delete(&p.event)
|
||||
}
|
||||
if p.event.typ == yaml_STREAM_END_EVENT {
|
||||
failf("attempted to go past the end of stream; corrupted value?")
|
||||
}
|
||||
if p.event.typ != e {
|
||||
p.parser.problem = fmt.Sprintf("expected %s event but got %s", e, p.event.typ)
|
||||
p.fail()
|
||||
}
|
||||
yaml_event_delete(&p.event)
|
||||
p.event.typ = yaml_NO_EVENT
|
||||
}
|
||||
|
||||
// peek peeks at the next event in the event stream,
|
||||
// puts the results into p.event and returns the event type.
|
||||
func (p *parser) peek() yaml_event_type_t {
|
||||
if p.event.typ != yaml_NO_EVENT {
|
||||
return p.event.typ
|
||||
}
|
||||
if !yaml_parser_parse(&p.parser, &p.event) {
|
||||
p.fail()
|
||||
}
|
||||
return p.event.typ
|
||||
}
|
||||
|
||||
func (p *parser) fail() {
|
||||
|
@ -81,6 +113,10 @@ func (p *parser) fail() {
|
|||
var line int
|
||||
if p.parser.problem_mark.line != 0 {
|
||||
line = p.parser.problem_mark.line
|
||||
// Scanner errors don't iterate line before returning error
|
||||
if p.parser.error == yaml_SCANNER_ERROR {
|
||||
line++
|
||||
}
|
||||
} else if p.parser.context_mark.line != 0 {
|
||||
line = p.parser.context_mark.line
|
||||
}
|
||||
|
@ -103,7 +139,8 @@ func (p *parser) anchor(n *node, anchor []byte) {
|
|||
}
|
||||
|
||||
func (p *parser) parse() *node {
|
||||
switch p.event.typ {
|
||||
p.init()
|
||||
switch p.peek() {
|
||||
case yaml_SCALAR_EVENT:
|
||||
return p.scalar()
|
||||
case yaml_ALIAS_EVENT:
|
||||
|
@ -118,7 +155,7 @@ func (p *parser) parse() *node {
|
|||
// Happens when attempting to decode an empty buffer.
|
||||
return nil
|
||||
default:
|
||||
panic("attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ)))
|
||||
panic("attempted to parse unknown event: " + p.event.typ.String())
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -134,19 +171,20 @@ func (p *parser) document() *node {
|
|||
n := p.node(documentNode)
|
||||
n.anchors = make(map[string]*node)
|
||||
p.doc = n
|
||||
p.skip()
|
||||
p.expect(yaml_DOCUMENT_START_EVENT)
|
||||
n.children = append(n.children, p.parse())
|
||||
if p.event.typ != yaml_DOCUMENT_END_EVENT {
|
||||
panic("expected end of document event but got " + strconv.Itoa(int(p.event.typ)))
|
||||
}
|
||||
p.skip()
|
||||
p.expect(yaml_DOCUMENT_END_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) alias() *node {
|
||||
n := p.node(aliasNode)
|
||||
n.value = string(p.event.anchor)
|
||||
p.skip()
|
||||
n.alias = p.doc.anchors[n.value]
|
||||
if n.alias == nil {
|
||||
failf("unknown anchor '%s' referenced", n.value)
|
||||
}
|
||||
p.expect(yaml_ALIAS_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
|
@ -156,29 +194,29 @@ func (p *parser) scalar() *node {
|
|||
n.tag = string(p.event.tag)
|
||||
n.implicit = p.event.implicit
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
p.expect(yaml_SCALAR_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) sequence() *node {
|
||||
n := p.node(sequenceNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_SEQUENCE_END_EVENT {
|
||||
p.expect(yaml_SEQUENCE_START_EVENT)
|
||||
for p.peek() != yaml_SEQUENCE_END_EVENT {
|
||||
n.children = append(n.children, p.parse())
|
||||
}
|
||||
p.skip()
|
||||
p.expect(yaml_SEQUENCE_END_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
func (p *parser) mapping() *node {
|
||||
n := p.node(mappingNode)
|
||||
p.anchor(n, p.event.anchor)
|
||||
p.skip()
|
||||
for p.event.typ != yaml_MAPPING_END_EVENT {
|
||||
p.expect(yaml_MAPPING_START_EVENT)
|
||||
for p.peek() != yaml_MAPPING_END_EVENT {
|
||||
n.children = append(n.children, p.parse(), p.parse())
|
||||
}
|
||||
p.skip()
|
||||
p.expect(yaml_MAPPING_END_EVENT)
|
||||
return n
|
||||
}
|
||||
|
||||
|
@ -187,7 +225,7 @@ func (p *parser) mapping() *node {
|
|||
|
||||
type decoder struct {
|
||||
doc *node
|
||||
aliases map[string]bool
|
||||
aliases map[*node]bool
|
||||
mapType reflect.Type
|
||||
terrors []string
|
||||
strict bool
|
||||
|
@ -198,11 +236,13 @@ var (
|
|||
durationType = reflect.TypeOf(time.Duration(0))
|
||||
defaultMapType = reflect.TypeOf(map[interface{}]interface{}{})
|
||||
ifaceType = defaultMapType.Elem()
|
||||
timeType = reflect.TypeOf(time.Time{})
|
||||
ptrTimeType = reflect.TypeOf(&time.Time{})
|
||||
)
|
||||
|
||||
func newDecoder(strict bool) *decoder {
|
||||
d := &decoder{mapType: defaultMapType, strict: strict}
|
||||
d.aliases = make(map[string]bool)
|
||||
d.aliases = make(map[*node]bool)
|
||||
return d
|
||||
}
|
||||
|
||||
|
@ -251,7 +291,7 @@ func (d *decoder) callUnmarshaler(n *node, u Unmarshaler) (good bool) {
|
|||
//
|
||||
// If n holds a null value, prepare returns before doing anything.
|
||||
func (d *decoder) prepare(n *node, out reflect.Value) (newout reflect.Value, unmarshaled, good bool) {
|
||||
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "" && n.implicit) {
|
||||
if n.tag == yaml_NULL_TAG || n.kind == scalarNode && n.tag == "" && (n.value == "null" || n.value == "~" || n.value == "" && n.implicit) {
|
||||
return out, false, false
|
||||
}
|
||||
again := true
|
||||
|
@ -308,16 +348,13 @@ func (d *decoder) document(n *node, out reflect.Value) (good bool) {
|
|||
}
|
||||
|
||||
func (d *decoder) alias(n *node, out reflect.Value) (good bool) {
|
||||
an, ok := d.doc.anchors[n.value]
|
||||
if !ok {
|
||||
failf("unknown anchor '%s' referenced", n.value)
|
||||
}
|
||||
if d.aliases[n.value] {
|
||||
if d.aliases[n] {
|
||||
// TODO this could actually be allowed in some circumstances.
|
||||
failf("anchor '%s' value contains itself", n.value)
|
||||
}
|
||||
d.aliases[n.value] = true
|
||||
good = d.unmarshal(an, out)
|
||||
delete(d.aliases, n.value)
|
||||
d.aliases[n] = true
|
||||
good = d.unmarshal(n.alias, out)
|
||||
delete(d.aliases, n)
|
||||
return good
|
||||
}
|
||||
|
||||
|
@ -329,7 +366,7 @@ func resetMap(out reflect.Value) {
|
|||
}
|
||||
}
|
||||
|
||||
func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
||||
func (d *decoder) scalar(n *node, out reflect.Value) bool {
|
||||
var tag string
|
||||
var resolved interface{}
|
||||
if n.tag == "" && !n.implicit {
|
||||
|
@ -353,9 +390,26 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
|||
}
|
||||
return true
|
||||
}
|
||||
if s, ok := resolved.(string); ok && out.CanAddr() {
|
||||
if u, ok := out.Addr().Interface().(encoding.TextUnmarshaler); ok {
|
||||
err := u.UnmarshalText([]byte(s))
|
||||
if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
|
||||
// We've resolved to exactly the type we want, so use that.
|
||||
out.Set(resolvedv)
|
||||
return true
|
||||
}
|
||||
// Perhaps we can use the value as a TextUnmarshaler to
|
||||
// set its value.
|
||||
if out.CanAddr() {
|
||||
u, ok := out.Addr().Interface().(encoding.TextUnmarshaler)
|
||||
if ok {
|
||||
var text []byte
|
||||
if tag == yaml_BINARY_TAG {
|
||||
text = []byte(resolved.(string))
|
||||
} else {
|
||||
// We let any value be unmarshaled into TextUnmarshaler.
|
||||
// That might be more lax than we'd like, but the
|
||||
// TextUnmarshaler itself should bowl out any dubious values.
|
||||
text = []byte(n.value)
|
||||
}
|
||||
err := u.UnmarshalText(text)
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
|
@ -366,46 +420,54 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
|||
case reflect.String:
|
||||
if tag == yaml_BINARY_TAG {
|
||||
out.SetString(resolved.(string))
|
||||
good = true
|
||||
} else if resolved != nil {
|
||||
return true
|
||||
}
|
||||
if resolved != nil {
|
||||
out.SetString(n.value)
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case reflect.Interface:
|
||||
if resolved == nil {
|
||||
out.Set(reflect.Zero(out.Type()))
|
||||
} else if tag == yaml_TIMESTAMP_TAG {
|
||||
// It looks like a timestamp but for backward compatibility
|
||||
// reasons we set it as a string, so that code that unmarshals
|
||||
// timestamp-like values into interface{} will continue to
|
||||
// see a string and not a time.Time.
|
||||
// TODO(v3) Drop this.
|
||||
out.Set(reflect.ValueOf(n.value))
|
||||
} else {
|
||||
out.Set(reflect.ValueOf(resolved))
|
||||
}
|
||||
good = true
|
||||
return true
|
||||
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
if !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case int64:
|
||||
if !out.OverflowInt(resolved) {
|
||||
out.SetInt(resolved)
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case uint64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxInt64 && !out.OverflowInt(int64(resolved)) {
|
||||
out.SetInt(int64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case string:
|
||||
if out.Type() == durationType {
|
||||
d, err := time.ParseDuration(resolved)
|
||||
if err == nil {
|
||||
out.SetInt(int64(d))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -414,44 +476,49 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
|||
case int:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case int64:
|
||||
if resolved >= 0 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case uint64:
|
||||
if !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case float64:
|
||||
if resolved <= math.MaxUint64 && !out.OverflowUint(uint64(resolved)) {
|
||||
out.SetUint(uint64(resolved))
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
}
|
||||
case reflect.Bool:
|
||||
switch resolved := resolved.(type) {
|
||||
case bool:
|
||||
out.SetBool(resolved)
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case reflect.Float32, reflect.Float64:
|
||||
switch resolved := resolved.(type) {
|
||||
case int:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
return true
|
||||
case int64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
return true
|
||||
case uint64:
|
||||
out.SetFloat(float64(resolved))
|
||||
good = true
|
||||
return true
|
||||
case float64:
|
||||
out.SetFloat(resolved)
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
case reflect.Struct:
|
||||
if resolvedv := reflect.ValueOf(resolved); out.Type() == resolvedv.Type() {
|
||||
out.Set(resolvedv)
|
||||
return true
|
||||
}
|
||||
case reflect.Ptr:
|
||||
if out.Type().Elem() == reflect.TypeOf(resolved) {
|
||||
|
@ -459,13 +526,11 @@ func (d *decoder) scalar(n *node, out reflect.Value) (good bool) {
|
|||
elem := reflect.New(out.Type().Elem())
|
||||
elem.Elem().Set(reflect.ValueOf(resolved))
|
||||
out.Set(elem)
|
||||
good = true
|
||||
return true
|
||||
}
|
||||
}
|
||||
if !good {
|
||||
d.terror(n, tag, out)
|
||||
}
|
||||
return good
|
||||
d.terror(n, tag, out)
|
||||
return false
|
||||
}
|
||||
|
||||
func settableValueOf(i interface{}) reflect.Value {
|
||||
|
@ -482,6 +547,10 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
|||
switch out.Kind() {
|
||||
case reflect.Slice:
|
||||
out.Set(reflect.MakeSlice(out.Type(), l, l))
|
||||
case reflect.Array:
|
||||
if l != out.Len() {
|
||||
failf("invalid array: want %d elements but got %d", out.Len(), l)
|
||||
}
|
||||
case reflect.Interface:
|
||||
// No type hints. Will have to use a generic sequence.
|
||||
iface = out
|
||||
|
@ -500,7 +569,9 @@ func (d *decoder) sequence(n *node, out reflect.Value) (good bool) {
|
|||
j++
|
||||
}
|
||||
}
|
||||
out.Set(out.Slice(0, j))
|
||||
if out.Kind() != reflect.Array {
|
||||
out.Set(out.Slice(0, j))
|
||||
}
|
||||
if iface.IsValid() {
|
||||
iface.Set(out)
|
||||
}
|
||||
|
@ -561,7 +632,7 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
|||
}
|
||||
e := reflect.New(et).Elem()
|
||||
if d.unmarshal(n.children[i+1], e) {
|
||||
out.SetMapIndex(k, e)
|
||||
d.setMapIndex(n.children[i+1], out, k, e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -569,6 +640,14 @@ func (d *decoder) mapping(n *node, out reflect.Value) (good bool) {
|
|||
return true
|
||||
}
|
||||
|
||||
func (d *decoder) setMapIndex(n *node, out, k, v reflect.Value) {
|
||||
if d.strict && out.MapIndex(k) != zeroValue {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: key %#v already set in map", n.line+1, k.Interface()))
|
||||
return
|
||||
}
|
||||
out.SetMapIndex(k, v)
|
||||
}
|
||||
|
||||
func (d *decoder) mappingSlice(n *node, out reflect.Value) (good bool) {
|
||||
outt := out.Type()
|
||||
if outt.Elem() != mapItemType {
|
||||
|
@ -616,6 +695,10 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
|||
elemType = inlineMap.Type().Elem()
|
||||
}
|
||||
|
||||
var doneFields []bool
|
||||
if d.strict {
|
||||
doneFields = make([]bool, len(sinfo.FieldsList))
|
||||
}
|
||||
for i := 0; i < l; i += 2 {
|
||||
ni := n.children[i]
|
||||
if isMerge(ni) {
|
||||
|
@ -626,6 +709,13 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
|||
continue
|
||||
}
|
||||
if info, ok := sinfo.FieldsMap[name.String()]; ok {
|
||||
if d.strict {
|
||||
if doneFields[info.Id] {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s already set in type %s", ni.line+1, name.String(), out.Type()))
|
||||
continue
|
||||
}
|
||||
doneFields[info.Id] = true
|
||||
}
|
||||
var field reflect.Value
|
||||
if info.Inline == nil {
|
||||
field = out.Field(info.Num)
|
||||
|
@ -639,9 +729,9 @@ func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) {
|
|||
}
|
||||
value := reflect.New(elemType).Elem()
|
||||
d.unmarshal(n.children[i+1], value)
|
||||
inlineMap.SetMapIndex(name, value)
|
||||
d.setMapIndex(n.children[i+1], inlineMap, name, value)
|
||||
} else if d.strict {
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in struct %s", n.line+1, name.String(), out.Type()))
|
||||
d.terrors = append(d.terrors, fmt.Sprintf("line %d: field %s not found in type %s", ni.line+1, name.String(), out.Type()))
|
||||
}
|
||||
}
|
||||
return true
|
||||
|
|
File diff suppressed because it is too large
Load Diff
|
@ -2,6 +2,7 @@ package yaml
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Flush the buffer if needed.
|
||||
|
@ -664,7 +665,7 @@ func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t,
|
|||
return yaml_emitter_emit_mapping_start(emitter, event)
|
||||
default:
|
||||
return yaml_emitter_set_emitter_error(emitter,
|
||||
"expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS")
|
||||
fmt.Sprintf("expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS, but got %v", event.typ))
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -842,7 +843,7 @@ func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event
|
|||
return true
|
||||
}
|
||||
|
||||
// Write an achor.
|
||||
// Write an anchor.
|
||||
func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool {
|
||||
if emitter.anchor_data.anchor == nil {
|
||||
return true
|
||||
|
@ -995,9 +996,9 @@ func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool {
|
|||
space_break = false
|
||||
|
||||
preceded_by_whitespace = false
|
||||
followed_by_whitespace = false
|
||||
previous_space = false
|
||||
previous_break = false
|
||||
followed_by_whitespace = false
|
||||
previous_space = false
|
||||
previous_break = false
|
||||
)
|
||||
|
||||
emitter.scalar_data.value = value
|
||||
|
|
|
@ -3,12 +3,14 @@ package yaml
|
|||
import (
|
||||
"encoding"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode/utf8"
|
||||
)
|
||||
|
||||
type encoder struct {
|
||||
|
@ -16,25 +18,39 @@ type encoder struct {
|
|||
event yaml_event_t
|
||||
out []byte
|
||||
flow bool
|
||||
// doneInit holds whether the initial stream_start_event has been
|
||||
// emitted.
|
||||
doneInit bool
|
||||
}
|
||||
|
||||
func newEncoder() (e *encoder) {
|
||||
e = &encoder{}
|
||||
e.must(yaml_emitter_initialize(&e.emitter))
|
||||
func newEncoder() *encoder {
|
||||
e := &encoder{}
|
||||
yaml_emitter_initialize(&e.emitter)
|
||||
yaml_emitter_set_output_string(&e.emitter, &e.out)
|
||||
yaml_emitter_set_unicode(&e.emitter, true)
|
||||
e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING))
|
||||
e.emit()
|
||||
e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true))
|
||||
e.emit()
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *encoder) finish() {
|
||||
e.must(yaml_document_end_event_initialize(&e.event, true))
|
||||
func newEncoderWithWriter(w io.Writer) *encoder {
|
||||
e := &encoder{}
|
||||
yaml_emitter_initialize(&e.emitter)
|
||||
yaml_emitter_set_output_writer(&e.emitter, w)
|
||||
yaml_emitter_set_unicode(&e.emitter, true)
|
||||
return e
|
||||
}
|
||||
|
||||
func (e *encoder) init() {
|
||||
if e.doneInit {
|
||||
return
|
||||
}
|
||||
yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)
|
||||
e.emit()
|
||||
e.doneInit = true
|
||||
}
|
||||
|
||||
func (e *encoder) finish() {
|
||||
e.emitter.open_ended = false
|
||||
e.must(yaml_stream_end_event_initialize(&e.event))
|
||||
yaml_stream_end_event_initialize(&e.event)
|
||||
e.emit()
|
||||
}
|
||||
|
||||
|
@ -44,9 +60,7 @@ func (e *encoder) destroy() {
|
|||
|
||||
func (e *encoder) emit() {
|
||||
// This will internally delete the e.event value.
|
||||
if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT {
|
||||
e.must(false)
|
||||
}
|
||||
e.must(yaml_emitter_emit(&e.emitter, &e.event))
|
||||
}
|
||||
|
||||
func (e *encoder) must(ok bool) {
|
||||
|
@ -59,13 +73,28 @@ func (e *encoder) must(ok bool) {
|
|||
}
|
||||
}
|
||||
|
||||
func (e *encoder) marshalDoc(tag string, in reflect.Value) {
|
||||
e.init()
|
||||
yaml_document_start_event_initialize(&e.event, nil, nil, true)
|
||||
e.emit()
|
||||
e.marshal(tag, in)
|
||||
yaml_document_end_event_initialize(&e.event, true)
|
||||
e.emit()
|
||||
}
|
||||
|
||||
func (e *encoder) marshal(tag string, in reflect.Value) {
|
||||
if !in.IsValid() {
|
||||
if !in.IsValid() || in.Kind() == reflect.Ptr && in.IsNil() {
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
iface := in.Interface()
|
||||
if m, ok := iface.(Marshaler); ok {
|
||||
switch m := iface.(type) {
|
||||
case time.Time, *time.Time:
|
||||
// Although time.Time implements TextMarshaler,
|
||||
// we don't want to treat it as a string for YAML
|
||||
// purposes because YAML has special support for
|
||||
// timestamps.
|
||||
case Marshaler:
|
||||
v, err := m.MarshalYAML()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
|
@ -75,31 +104,34 @@ func (e *encoder) marshal(tag string, in reflect.Value) {
|
|||
return
|
||||
}
|
||||
in = reflect.ValueOf(v)
|
||||
} else if m, ok := iface.(encoding.TextMarshaler); ok {
|
||||
case encoding.TextMarshaler:
|
||||
text, err := m.MarshalText()
|
||||
if err != nil {
|
||||
fail(err)
|
||||
}
|
||||
in = reflect.ValueOf(string(text))
|
||||
case nil:
|
||||
e.nilv()
|
||||
return
|
||||
}
|
||||
switch in.Kind() {
|
||||
case reflect.Interface:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
e.marshal(tag, in.Elem())
|
||||
case reflect.Map:
|
||||
e.mapv(tag, in)
|
||||
case reflect.Ptr:
|
||||
if in.IsNil() {
|
||||
e.nilv()
|
||||
if in.Type() == ptrTimeType {
|
||||
e.timev(tag, in.Elem())
|
||||
} else {
|
||||
e.marshal(tag, in.Elem())
|
||||
}
|
||||
case reflect.Struct:
|
||||
e.structv(tag, in)
|
||||
case reflect.Slice:
|
||||
if in.Type() == timeType {
|
||||
e.timev(tag, in)
|
||||
} else {
|
||||
e.structv(tag, in)
|
||||
}
|
||||
case reflect.Slice, reflect.Array:
|
||||
if in.Type().Elem() == mapItemType {
|
||||
e.itemsv(tag, in)
|
||||
} else {
|
||||
|
@ -191,10 +223,10 @@ func (e *encoder) mappingv(tag string, f func()) {
|
|||
e.flow = false
|
||||
style = yaml_FLOW_MAPPING_STYLE
|
||||
}
|
||||
e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style))
|
||||
yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)
|
||||
e.emit()
|
||||
f()
|
||||
e.must(yaml_mapping_end_event_initialize(&e.event))
|
||||
yaml_mapping_end_event_initialize(&e.event)
|
||||
e.emit()
|
||||
}
|
||||
|
||||
|
@ -240,23 +272,36 @@ var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0
|
|||
func (e *encoder) stringv(tag string, in reflect.Value) {
|
||||
var style yaml_scalar_style_t
|
||||
s := in.String()
|
||||
rtag, rs := resolve("", s)
|
||||
if rtag == yaml_BINARY_TAG {
|
||||
if tag == "" || tag == yaml_STR_TAG {
|
||||
tag = rtag
|
||||
s = rs.(string)
|
||||
} else if tag == yaml_BINARY_TAG {
|
||||
canUsePlain := true
|
||||
switch {
|
||||
case !utf8.ValidString(s):
|
||||
if tag == yaml_BINARY_TAG {
|
||||
failf("explicitly tagged !!binary data must be base64-encoded")
|
||||
} else {
|
||||
}
|
||||
if tag != "" {
|
||||
failf("cannot marshal invalid UTF-8 data as %s", shortTag(tag))
|
||||
}
|
||||
// It can't be encoded directly as YAML so use a binary tag
|
||||
// and encode it as base64.
|
||||
tag = yaml_BINARY_TAG
|
||||
s = encodeBase64(s)
|
||||
case tag == "":
|
||||
// Check to see if it would resolve to a specific
|
||||
// tag when encoded unquoted. If it doesn't,
|
||||
// there's no need to quote it.
|
||||
rtag, _ := resolve("", s)
|
||||
canUsePlain = rtag == yaml_STR_TAG && !isBase60Float(s)
|
||||
}
|
||||
if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) {
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
} else if strings.Contains(s, "\n") {
|
||||
// Note: it's possible for user code to emit invalid YAML
|
||||
// if they explicitly specify a tag and a string containing
|
||||
// text that's incompatible with that tag.
|
||||
switch {
|
||||
case strings.Contains(s, "\n"):
|
||||
style = yaml_LITERAL_SCALAR_STYLE
|
||||
} else {
|
||||
case canUsePlain:
|
||||
style = yaml_PLAIN_SCALAR_STYLE
|
||||
default:
|
||||
style = yaml_DOUBLE_QUOTED_SCALAR_STYLE
|
||||
}
|
||||
e.emitScalar(s, "", tag, style)
|
||||
}
|
||||
|
@ -281,9 +326,20 @@ func (e *encoder) uintv(tag string, in reflect.Value) {
|
|||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) timev(tag string, in reflect.Value) {
|
||||
t := in.Interface().(time.Time)
|
||||
s := t.Format(time.RFC3339Nano)
|
||||
e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE)
|
||||
}
|
||||
|
||||
func (e *encoder) floatv(tag string, in reflect.Value) {
|
||||
// FIXME: Handle 64 bits here.
|
||||
s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32)
|
||||
// Issue #352: When formatting, use the precision of the underlying value
|
||||
precision := 64
|
||||
if in.Kind() == reflect.Float32 {
|
||||
precision = 32
|
||||
}
|
||||
|
||||
s := strconv.FormatFloat(in.Float(), 'g', -1, precision)
|
||||
switch s {
|
||||
case "+Inf":
|
||||
s = ".inf"
|
||||
|
|
|
@ -1,501 +0,0 @@
|
|||
package yaml_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"math"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
. "gopkg.in/check.v1"
|
||||
"gopkg.in/yaml.v2"
|
||||
"net"
|
||||
"os"
|
||||
)
|
||||
|
||||
var marshalIntTest = 123
|
||||
|
||||
var marshalTests = []struct {
|
||||
value interface{}
|
||||
data string
|
||||
}{
|
||||
{
|
||||
nil,
|
||||
"null\n",
|
||||
}, {
|
||||
&struct{}{},
|
||||
"{}\n",
|
||||
}, {
|
||||
map[string]string{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "hi"},
|
||||
"v: hi\n",
|
||||
}, {
|
||||
map[string]string{"v": "true"},
|
||||
"v: \"true\"\n",
|
||||
}, {
|
||||
map[string]string{"v": "false"},
|
||||
"v: \"false\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": true},
|
||||
"v: true\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": false},
|
||||
"v: false\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 10},
|
||||
"v: 10\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -10},
|
||||
"v: -10\n",
|
||||
}, {
|
||||
map[string]uint{"v": 42},
|
||||
"v: 42\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]int64{"v": int64(4294967296)},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]uint64{"v": 4294967296},
|
||||
"v: 4294967296\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": "10"},
|
||||
"v: \"10\"\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": 0.1},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": float64(0.1)},
|
||||
"v: 0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": -0.1},
|
||||
"v: -0.1\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(+1)},
|
||||
"v: .inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.Inf(-1)},
|
||||
"v: -.inf\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": math.NaN()},
|
||||
"v: .nan\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": nil},
|
||||
"v: null\n",
|
||||
}, {
|
||||
map[string]interface{}{"v": ""},
|
||||
"v: \"\"\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B"}},
|
||||
"v:\n- A\n- B\n",
|
||||
}, {
|
||||
map[string][]string{"v": []string{"A", "B\nC"}},
|
||||
"v:\n- A\n- |-\n B\n C\n",
|
||||
}, {
|
||||
map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}},
|
||||
"v:\n- A\n- 1\n- B:\n - 2\n - 3\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
map[string]interface{}{"a": "-"},
|
||||
"a: '-'\n",
|
||||
},
|
||||
|
||||
// Simple values.
|
||||
{
|
||||
&marshalIntTest,
|
||||
"123\n",
|
||||
},
|
||||
|
||||
// Structures
|
||||
{
|
||||
&struct{ Hello string }{"world"},
|
||||
"hello: world\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B string
|
||||
}
|
||||
}{struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{&struct{ B string }{"c"}},
|
||||
"a:\n b: c\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct {
|
||||
B string
|
||||
}
|
||||
}{},
|
||||
"a: null\n",
|
||||
}, {
|
||||
&struct{ A int }{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A []int }{[]int{1, 2}},
|
||||
"a:\n- 1\n- 2\n",
|
||||
}, {
|
||||
&struct {
|
||||
B int "a"
|
||||
}{1},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct{ A bool }{true},
|
||||
"a: true\n",
|
||||
},
|
||||
|
||||
// Conditional flag
|
||||
{
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
}, {
|
||||
&struct {
|
||||
A int "a,omitempty"
|
||||
B int "b,omitempty"
|
||||
}{0, 0},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{nil},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A *struct{ X, y int } "a,omitempty,flow"
|
||||
}{&struct{ X, y int }{}},
|
||||
"a: {x: 0}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{1, 2}},
|
||||
"a: {x: 1}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct{ X, y int } "a,omitempty,flow"
|
||||
}{struct{ X, y int }{0, 1}},
|
||||
"{}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A float64 "a,omitempty"
|
||||
B float64 "b,omitempty"
|
||||
}{1, 0},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Flow flag
|
||||
{
|
||||
&struct {
|
||||
A []int "a,flow"
|
||||
}{[]int{1, 2}},
|
||||
"a: [1, 2]\n",
|
||||
}, {
|
||||
&struct {
|
||||
A map[string]string "a,flow"
|
||||
}{map[string]string{"b": "c", "d": "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
}, {
|
||||
&struct {
|
||||
A struct {
|
||||
B, D string
|
||||
} "a,flow"
|
||||
}{struct{ B, D string }{"c", "e"}},
|
||||
"a: {b: c, d: e}\n",
|
||||
},
|
||||
|
||||
// Unexported field
|
||||
{
|
||||
&struct {
|
||||
u int
|
||||
A int
|
||||
}{0, 1},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Ignored field
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
B int "-"
|
||||
}{1, 2},
|
||||
"a: 1\n",
|
||||
},
|
||||
|
||||
// Struct inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C inlineB `yaml:",inline"`
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Map inlining
|
||||
{
|
||||
&struct {
|
||||
A int
|
||||
C map[string]int `yaml:",inline"`
|
||||
}{1, map[string]int{"b": 2, "c": 3}},
|
||||
"a: 1\nb: 2\nc: 3\n",
|
||||
},
|
||||
|
||||
// Duration
|
||||
{
|
||||
map[string]time.Duration{"a": 3 * time.Second},
|
||||
"a: 3s\n",
|
||||
},
|
||||
|
||||
// Issue #24: bug in map merging logic.
|
||||
{
|
||||
map[string]string{"a": "<foo>"},
|
||||
"a: <foo>\n",
|
||||
},
|
||||
|
||||
// Issue #34: marshal unsupported base 60 floats quoted for compatibility
|
||||
// with old YAML 1.1 parsers.
|
||||
{
|
||||
map[string]string{"a": "1:1"},
|
||||
"a: \"1:1\"\n",
|
||||
},
|
||||
|
||||
// Binary data.
|
||||
{
|
||||
map[string]string{"a": "\x00"},
|
||||
"a: \"\\0\"\n",
|
||||
}, {
|
||||
map[string]string{"a": "\x80\x81\x82"},
|
||||
"a: !!binary gIGC\n",
|
||||
}, {
|
||||
map[string]string{"a": strings.Repeat("\x90", 54)},
|
||||
"a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n",
|
||||
},
|
||||
|
||||
// Ordered maps.
|
||||
{
|
||||
&yaml.MapSlice{{"b", 2}, {"a", 1}, {"d", 4}, {"c", 3}, {"sub", yaml.MapSlice{{"e", 5}}}},
|
||||
"b: 2\na: 1\nd: 4\nc: 3\nsub:\n e: 5\n",
|
||||
},
|
||||
|
||||
// Encode unicode as utf-8 rather than in escaped form.
|
||||
{
|
||||
map[string]string{"a": "你好"},
|
||||
"a: 你好\n",
|
||||
},
|
||||
|
||||
// Support encoding.TextMarshaler.
|
||||
{
|
||||
map[string]net.IP{"a": net.IPv4(1, 2, 3, 4)},
|
||||
"a: 1.2.3.4\n",
|
||||
},
|
||||
{
|
||||
map[string]time.Time{"a": time.Unix(1424801979, 0)},
|
||||
"a: 2015-02-24T18:19:39Z\n",
|
||||
},
|
||||
|
||||
// Ensure strings containing ": " are quoted (reported as PR #43, but not reproducible).
|
||||
{
|
||||
map[string]string{"a": "b: c"},
|
||||
"a: 'b: c'\n",
|
||||
},
|
||||
|
||||
// Containing hash mark ('#') in string should be quoted
|
||||
{
|
||||
map[string]string{"a": "Hello #comment"},
|
||||
"a: 'Hello #comment'\n",
|
||||
},
|
||||
{
|
||||
map[string]string{"a": "你好 #comment"},
|
||||
"a: '你好 #comment'\n",
|
||||
},
|
||||
}
|
||||
|
||||
func (s *S) TestMarshal(c *C) {
|
||||
defer os.Setenv("TZ", os.Getenv("TZ"))
|
||||
os.Setenv("TZ", "UTC")
|
||||
for _, item := range marshalTests {
|
||||
data, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, item.data)
|
||||
}
|
||||
}
|
||||
|
||||
var marshalErrorTests = []struct {
|
||||
value interface{}
|
||||
error string
|
||||
panic string
|
||||
}{{
|
||||
value: &struct {
|
||||
B int
|
||||
inlineB ",inline"
|
||||
}{1, inlineB{2, inlineC{3}}},
|
||||
panic: `Duplicated key 'b' in struct struct \{ B int; .*`,
|
||||
}, {
|
||||
value: &struct {
|
||||
A int
|
||||
B map[string]int ",inline"
|
||||
}{1, map[string]int{"a": 2}},
|
||||
panic: `Can't have key "a" in inlined map; conflicts with struct field`,
|
||||
}}
|
||||
|
||||
func (s *S) TestMarshalErrors(c *C) {
|
||||
for _, item := range marshalErrorTests {
|
||||
if item.panic != "" {
|
||||
c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic)
|
||||
} else {
|
||||
_, err := yaml.Marshal(item.value)
|
||||
c.Assert(err, ErrorMatches, item.error)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalTypeCache(c *C) {
|
||||
var data []byte
|
||||
var err error
|
||||
func() {
|
||||
type T struct{ A int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
func() {
|
||||
type T struct{ B int }
|
||||
data, err = yaml.Marshal(&T{})
|
||||
c.Assert(err, IsNil)
|
||||
}()
|
||||
c.Assert(string(data), Equals, "b: 0\n")
|
||||
}
|
||||
|
||||
var marshalerTests = []struct {
|
||||
data string
|
||||
value interface{}
|
||||
}{
|
||||
{"_:\n hi: there\n", map[interface{}]interface{}{"hi": "there"}},
|
||||
{"_:\n- 1\n- A\n", []interface{}{1, "A"}},
|
||||
{"_: 10\n", 10},
|
||||
{"_: null\n", nil},
|
||||
{"_: BAR!\n", "BAR!"},
|
||||
}
|
||||
|
||||
type marshalerType struct {
|
||||
value interface{}
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalText() ([]byte, error) {
|
||||
panic("MarshalText called on type with MarshalYAML")
|
||||
}
|
||||
|
||||
func (o marshalerType) MarshalYAML() (interface{}, error) {
|
||||
return o.value, nil
|
||||
}
|
||||
|
||||
type marshalerValue struct {
|
||||
Field marshalerType "_"
|
||||
}
|
||||
|
||||
func (s *S) TestMarshaler(c *C) {
|
||||
for _, item := range marshalerTests {
|
||||
obj := &marshalerValue{}
|
||||
obj.Field.value = item.value
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, string(item.data))
|
||||
}
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerWholeDocument(c *C) {
|
||||
obj := &marshalerType{}
|
||||
obj.value = map[string]string{"hello": "world!"}
|
||||
data, err := yaml.Marshal(obj)
|
||||
c.Assert(err, IsNil)
|
||||
c.Assert(string(data), Equals, "hello: world!\n")
|
||||
}
|
||||
|
||||
type failingMarshaler struct{}
|
||||
|
||||
func (ft *failingMarshaler) MarshalYAML() (interface{}, error) {
|
||||
return nil, failingErr
|
||||
}
|
||||
|
||||
func (s *S) TestMarshalerError(c *C) {
|
||||
_, err := yaml.Marshal(&failingMarshaler{})
|
||||
c.Assert(err, Equals, failingErr)
|
||||
}
|
||||
|
||||
func (s *S) TestSortedOutput(c *C) {
|
||||
order := []interface{}{
|
||||
false,
|
||||
true,
|
||||
1,
|
||||
uint(1),
|
||||
1.0,
|
||||
1.1,
|
||||
1.2,
|
||||
2,
|
||||
uint(2),
|
||||
2.0,
|
||||
2.1,
|
||||
"",
|
||||
".1",
|
||||
".2",
|
||||
".a",
|
||||
"1",
|
||||
"2",
|
||||
"a!10",
|
||||
"a/2",
|
||||
"a/10",
|
||||
"a~10",
|
||||
"ab/1",
|
||||
"b/1",
|
||||
"b/01",
|
||||
"b/2",
|
||||
"b/02",
|
||||
"b/3",
|
||||
"b/03",
|
||||
"b1",
|
||||
"b01",
|
||||
"b3",
|
||||
"c2.10",
|
||||
"c10.2",
|
||||
"d1",
|
||||
"d12",
|
||||
"d12a",
|
||||
}
|
||||
m := make(map[interface{}]int)
|
||||
for _, k := range order {
|
||||
m[k] = 1
|
||||
}
|
||||
data, err := yaml.Marshal(m)
|
||||
c.Assert(err, IsNil)
|
||||
out := "\n" + string(data)
|
||||
last := 0
|
||||
for i, k := range order {
|
||||
repr := fmt.Sprint(k)
|
||||
if s, ok := k.(string); ok {
|
||||
if _, err = strconv.ParseFloat(repr, 32); s == "" || err == nil {
|
||||
repr = `"` + repr + `"`
|
||||
}
|
||||
}
|
||||
index := strings.Index(out, "\n"+repr+":")
|
||||
if index == -1 {
|
||||
c.Fatalf("%#v is not in the output: %#v", k, out)
|
||||
}
|
||||
if index < last {
|
||||
c.Fatalf("%#v was generated before %#v: %q", k, order[i-1], out)
|
||||
}
|
||||
last = index
|
||||
}
|
||||
}
|
|
@ -1,41 +0,0 @@
|
|||
package yaml_test
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
// An example showing how to unmarshal embedded
|
||||
// structs from YAML.
|
||||
|
||||
type StructA struct {
|
||||
A string `yaml:"a"`
|
||||
}
|
||||
|
||||
type StructB struct {
|
||||
// Embedded structs are not treated as embedded in YAML by default. To do that,
|
||||
// add the ",inline" annotation below
|
||||
StructA `yaml:",inline"`
|
||||
B string `yaml:"b"`
|
||||
}
|
||||
|
||||
var data = `
|
||||
a: a string from struct A
|
||||
b: a string from struct B
|
||||
`
|
||||
|
||||
func ExampleUnmarshal_embedded() {
|
||||
var b StructB
|
||||
|
||||
err := yaml.Unmarshal([]byte(data), &b)
|
||||
if err != nil {
|
||||
log.Fatal("cannot unmarshal data: %v", err)
|
||||
}
|
||||
fmt.Println(b.A)
|
||||
fmt.Println(b.B)
|
||||
// Output:
|
||||
// a string from struct A
|
||||
// a string from struct B
|
||||
}
|
|
@ -93,9 +93,18 @@ func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
|||
panic("read handler must be set")
|
||||
}
|
||||
|
||||
// [Go] This function was changed to guarantee the requested length size at EOF.
|
||||
// The fact we need to do this is pretty awful, but the description above implies
|
||||
// for that to be the case, and there are tests
|
||||
|
||||
// If the EOF flag is set and the raw buffer is empty, do nothing.
|
||||
if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) {
|
||||
return true
|
||||
// [Go] ACTUALLY! Read the documentation of this function above.
|
||||
// This is just broken. To return true, we need to have the
|
||||
// given length in the buffer. Not doing that means every single
|
||||
// check that calls this function to make sure the buffer has a
|
||||
// given length is Go) panicking; or C) accessing invalid memory.
|
||||
//return true
|
||||
}
|
||||
|
||||
// Return if the buffer contains enough characters.
|
||||
|
@ -389,6 +398,15 @@ func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool {
|
|||
break
|
||||
}
|
||||
}
|
||||
// [Go] Read the documentation of this function above. To return true,
|
||||
// we need to have the given length in the buffer. Not doing that means
|
||||
// every single check that calls this function to make sure the buffer
|
||||
// has a given length is Go) panicking; or C) accessing invalid memory.
|
||||
// This happens here due to the EOF above breaking early.
|
||||
for buffer_len < length {
|
||||
parser.buffer[buffer_len] = 0
|
||||
buffer_len++
|
||||
}
|
||||
parser.buffer = parser.buffer[:buffer_len]
|
||||
return true
|
||||
}
|
||||
|
|
|
@ -6,7 +6,7 @@ import (
|
|||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"unicode/utf8"
|
||||
"time"
|
||||
)
|
||||
|
||||
type resolveMapItem struct {
|
||||
|
@ -75,7 +75,7 @@ func longTag(tag string) string {
|
|||
|
||||
func resolvableTag(tag string) bool {
|
||||
switch tag {
|
||||
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG:
|
||||
case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG, yaml_TIMESTAMP_TAG:
|
||||
return true
|
||||
}
|
||||
return false
|
||||
|
@ -92,6 +92,19 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
|||
switch tag {
|
||||
case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG:
|
||||
return
|
||||
case yaml_FLOAT_TAG:
|
||||
if rtag == yaml_INT_TAG {
|
||||
switch v := out.(type) {
|
||||
case int64:
|
||||
rtag = yaml_FLOAT_TAG
|
||||
out = float64(v)
|
||||
return
|
||||
case int:
|
||||
rtag = yaml_FLOAT_TAG
|
||||
out = float64(v)
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
failf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))
|
||||
}()
|
||||
|
@ -125,6 +138,15 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
|||
|
||||
case 'D', 'S':
|
||||
// Int, float, or timestamp.
|
||||
// Only try values as a timestamp if the value is unquoted or there's an explicit
|
||||
// !!timestamp tag.
|
||||
if tag == "" || tag == yaml_TIMESTAMP_TAG {
|
||||
t, ok := parseTimestamp(in)
|
||||
if ok {
|
||||
return yaml_TIMESTAMP_TAG, t
|
||||
}
|
||||
}
|
||||
|
||||
plain := strings.Replace(in, "_", "", -1)
|
||||
intv, err := strconv.ParseInt(plain, 0, 64)
|
||||
if err == nil {
|
||||
|
@ -158,28 +180,20 @@ func resolve(tag string, in string) (rtag string, out interface{}) {
|
|||
return yaml_INT_TAG, uintv
|
||||
}
|
||||
} else if strings.HasPrefix(plain, "-0b") {
|
||||
intv, err := strconv.ParseInt(plain[3:], 2, 64)
|
||||
intv, err := strconv.ParseInt("-" + plain[3:], 2, 64)
|
||||
if err == nil {
|
||||
if intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, -int(intv)
|
||||
if true || intv == int64(int(intv)) {
|
||||
return yaml_INT_TAG, int(intv)
|
||||
} else {
|
||||
return yaml_INT_TAG, -intv
|
||||
return yaml_INT_TAG, intv
|
||||
}
|
||||
}
|
||||
}
|
||||
// XXX Handle timestamps here.
|
||||
|
||||
default:
|
||||
panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")")
|
||||
}
|
||||
}
|
||||
if tag == yaml_BINARY_TAG {
|
||||
return yaml_BINARY_TAG, in
|
||||
}
|
||||
if utf8.ValidString(in) {
|
||||
return yaml_STR_TAG, in
|
||||
}
|
||||
return yaml_BINARY_TAG, encodeBase64(in)
|
||||
return yaml_STR_TAG, in
|
||||
}
|
||||
|
||||
// encodeBase64 encodes s as base64 that is broken up into multiple lines
|
||||
|
@ -206,3 +220,39 @@ func encodeBase64(s string) string {
|
|||
}
|
||||
return string(out[:k])
|
||||
}
|
||||
|
||||
// This is a subset of the formats allowed by the regular expression
|
||||
// defined at http://yaml.org/type/timestamp.html.
|
||||
var allowedTimestampFormats = []string{
|
||||
"2006-1-2T15:4:5.999999999Z07:00", // RCF3339Nano with short date fields.
|
||||
"2006-1-2t15:4:5.999999999Z07:00", // RFC3339Nano with short date fields and lower-case "t".
|
||||
"2006-1-2 15:4:5.999999999", // space separated with no time zone
|
||||
"2006-1-2", // date only
|
||||
// Notable exception: time.Parse cannot handle: "2001-12-14 21:59:43.10 -5"
|
||||
// from the set of examples.
|
||||
}
|
||||
|
||||
// parseTimestamp parses s as a timestamp string and
|
||||
// returns the timestamp and reports whether it succeeded.
|
||||
// Timestamp formats are defined at http://yaml.org/type/timestamp.html
|
||||
func parseTimestamp(s string) (time.Time, bool) {
|
||||
// TODO write code to check all the formats supported by
|
||||
// http://yaml.org/type/timestamp.html instead of using time.Parse.
|
||||
|
||||
// Quick check: all date formats start with YYYY-.
|
||||
i := 0
|
||||
for ; i < len(s); i++ {
|
||||
if c := s[i]; c < '0' || c > '9' {
|
||||
break
|
||||
}
|
||||
}
|
||||
if i != 4 || i == len(s) || s[i] != '-' {
|
||||
return time.Time{}, false
|
||||
}
|
||||
for _, format := range allowedTimestampFormats {
|
||||
if t, err := time.Parse(format, s); err == nil {
|
||||
return t, true
|
||||
}
|
||||
}
|
||||
return time.Time{}, false
|
||||
}
|
||||
|
|
|
@ -871,12 +871,6 @@ func yaml_parser_save_simple_key(parser *yaml_parser_t) bool {
|
|||
|
||||
required := parser.flow_level == 0 && parser.indent == parser.mark.column
|
||||
|
||||
// A simple key is required only when it is the first token in the current
|
||||
// line. Therefore it is always allowed. But we add a check anyway.
|
||||
if required && !parser.simple_key_allowed {
|
||||
panic("should not happen")
|
||||
}
|
||||
|
||||
//
|
||||
// If the current position may start a simple key, save it.
|
||||
//
|
||||
|
@ -2475,6 +2469,10 @@ func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, si
|
|||
}
|
||||
}
|
||||
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
return false
|
||||
}
|
||||
|
||||
// Check if we are at the end of the scalar.
|
||||
if single {
|
||||
if parser.buffer[parser.buffer_pos] == '\'' {
|
||||
|
@ -2487,10 +2485,6 @@ func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, si
|
|||
}
|
||||
|
||||
// Consume blank characters.
|
||||
if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) {
|
||||
return false
|
||||
}
|
||||
|
||||
for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
|
||||
if is_blank(parser.buffer, parser.buffer_pos) {
|
||||
// Consume a space or a tab character.
|
||||
|
@ -2592,19 +2586,10 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
|
|||
// Consume non-blank characters.
|
||||
for !is_blankz(parser.buffer, parser.buffer_pos) {
|
||||
|
||||
// Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13".
|
||||
if parser.flow_level > 0 &&
|
||||
parser.buffer[parser.buffer_pos] == ':' &&
|
||||
!is_blankz(parser.buffer, parser.buffer_pos+1) {
|
||||
yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
|
||||
start_mark, "found unexpected ':'")
|
||||
return false
|
||||
}
|
||||
|
||||
// Check for indicators that may end a plain scalar.
|
||||
if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) ||
|
||||
(parser.flow_level > 0 &&
|
||||
(parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' ||
|
||||
(parser.buffer[parser.buffer_pos] == ',' ||
|
||||
parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' ||
|
||||
parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' ||
|
||||
parser.buffer[parser.buffer_pos] == '}')) {
|
||||
|
@ -2656,10 +2641,10 @@ func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) b
|
|||
for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) {
|
||||
if is_blank(parser.buffer, parser.buffer_pos) {
|
||||
|
||||
// Check for tab character that abuse indentation.
|
||||
// Check for tab characters that abuse indentation.
|
||||
if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) {
|
||||
yaml_parser_set_scanner_error(parser, "while scanning a plain scalar",
|
||||
start_mark, "found a tab character that violate indentation")
|
||||
start_mark, "found a tab character that violates indentation")
|
||||
return false
|
||||
}
|
||||
|
||||
|
|
|
@ -51,6 +51,15 @@ func (l keyList) Less(i, j int) bool {
|
|||
}
|
||||
var ai, bi int
|
||||
var an, bn int64
|
||||
if ar[i] == '0' || br[i] == '0' {
|
||||
for j := i-1; j >= 0 && unicode.IsDigit(ar[j]); j-- {
|
||||
if ar[j] != '0' {
|
||||
an = 1
|
||||
bn = 1
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
||||
for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ {
|
||||
an = an*10 + int64(ar[ai]-'0')
|
||||
}
|
||||
|
|
|
@ -1,12 +0,0 @@
|
|||
package yaml_test
|
||||
|
||||
import (
|
||||
. "gopkg.in/check.v1"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test(t *testing.T) { TestingT(t) }
|
||||
|
||||
type S struct{}
|
||||
|
||||
var _ = Suite(&S{})
|
|
@ -18,72 +18,9 @@ func yaml_emitter_flush(emitter *yaml_emitter_t) bool {
|
|||
return true
|
||||
}
|
||||
|
||||
// If the output encoding is UTF-8, we don't need to recode the buffer.
|
||||
if emitter.encoding == yaml_UTF8_ENCODING {
|
||||
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
return true
|
||||
}
|
||||
|
||||
// Recode the buffer into the raw buffer.
|
||||
var low, high int
|
||||
if emitter.encoding == yaml_UTF16LE_ENCODING {
|
||||
low, high = 0, 1
|
||||
} else {
|
||||
high, low = 1, 0
|
||||
}
|
||||
|
||||
pos := 0
|
||||
for pos < emitter.buffer_pos {
|
||||
// See the "reader.c" code for more details on UTF-8 encoding. Note
|
||||
// that we assume that the buffer contains a valid UTF-8 sequence.
|
||||
|
||||
// Read the next UTF-8 character.
|
||||
octet := emitter.buffer[pos]
|
||||
|
||||
var w int
|
||||
var value rune
|
||||
switch {
|
||||
case octet&0x80 == 0x00:
|
||||
w, value = 1, rune(octet&0x7F)
|
||||
case octet&0xE0 == 0xC0:
|
||||
w, value = 2, rune(octet&0x1F)
|
||||
case octet&0xF0 == 0xE0:
|
||||
w, value = 3, rune(octet&0x0F)
|
||||
case octet&0xF8 == 0xF0:
|
||||
w, value = 4, rune(octet&0x07)
|
||||
}
|
||||
for k := 1; k < w; k++ {
|
||||
octet = emitter.buffer[pos+k]
|
||||
value = (value << 6) + (rune(octet) & 0x3F)
|
||||
}
|
||||
pos += w
|
||||
|
||||
// Write the character.
|
||||
if value < 0x10000 {
|
||||
var b [2]byte
|
||||
b[high] = byte(value >> 8)
|
||||
b[low] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1])
|
||||
} else {
|
||||
// Write the character using a surrogate pair (check "reader.c").
|
||||
var b [4]byte
|
||||
value -= 0x10000
|
||||
b[high] = byte(0xD8 + (value >> 18))
|
||||
b[low] = byte((value >> 10) & 0xFF)
|
||||
b[high+2] = byte(0xDC + ((value >> 8) & 0xFF))
|
||||
b[low+2] = byte(value & 0xFF)
|
||||
emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3])
|
||||
}
|
||||
}
|
||||
|
||||
// Write the raw buffer.
|
||||
if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil {
|
||||
if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil {
|
||||
return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error())
|
||||
}
|
||||
emitter.buffer_pos = 0
|
||||
emitter.raw_buffer = emitter.raw_buffer[:0]
|
||||
return true
|
||||
}
|
||||
|
|
|
@ -9,6 +9,7 @@ package yaml
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"reflect"
|
||||
"strings"
|
||||
"sync"
|
||||
|
@ -81,12 +82,58 @@ func Unmarshal(in []byte, out interface{}) (err error) {
|
|||
}
|
||||
|
||||
// UnmarshalStrict is like Unmarshal except that any fields that are found
|
||||
// in the data that do not have corresponding struct members will result in
|
||||
// in the data that do not have corresponding struct members, or mapping
|
||||
// keys that are duplicates, will result in
|
||||
// an error.
|
||||
func UnmarshalStrict(in []byte, out interface{}) (err error) {
|
||||
return unmarshal(in, out, true)
|
||||
}
|
||||
|
||||
// A Decorder reads and decodes YAML values from an input stream.
|
||||
type Decoder struct {
|
||||
strict bool
|
||||
parser *parser
|
||||
}
|
||||
|
||||
// NewDecoder returns a new decoder that reads from r.
|
||||
//
|
||||
// The decoder introduces its own buffering and may read
|
||||
// data from r beyond the YAML values requested.
|
||||
func NewDecoder(r io.Reader) *Decoder {
|
||||
return &Decoder{
|
||||
parser: newParserFromReader(r),
|
||||
}
|
||||
}
|
||||
|
||||
// SetStrict sets whether strict decoding behaviour is enabled when
|
||||
// decoding items in the data (see UnmarshalStrict). By default, decoding is not strict.
|
||||
func (dec *Decoder) SetStrict(strict bool) {
|
||||
dec.strict = strict
|
||||
}
|
||||
|
||||
// Decode reads the next YAML-encoded value from its input
|
||||
// and stores it in the value pointed to by v.
|
||||
//
|
||||
// See the documentation for Unmarshal for details about the
|
||||
// conversion of YAML into a Go value.
|
||||
func (dec *Decoder) Decode(v interface{}) (err error) {
|
||||
d := newDecoder(dec.strict)
|
||||
defer handleErr(&err)
|
||||
node := dec.parser.parse()
|
||||
if node == nil {
|
||||
return io.EOF
|
||||
}
|
||||
out := reflect.ValueOf(v)
|
||||
if out.Kind() == reflect.Ptr && !out.IsNil() {
|
||||
out = out.Elem()
|
||||
}
|
||||
d.unmarshal(node, out)
|
||||
if len(d.terrors) > 0 {
|
||||
return &TypeError{d.terrors}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
||||
defer handleErr(&err)
|
||||
d := newDecoder(strict)
|
||||
|
@ -110,8 +157,8 @@ func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
|||
// of the generated document will reflect the structure of the value itself.
|
||||
// Maps and pointers (to struct, string, int, etc) are accepted as the in value.
|
||||
//
|
||||
// Struct fields are only unmarshalled if they are exported (have an upper case
|
||||
// first letter), and are unmarshalled using the field name lowercased as the
|
||||
// Struct fields are only marshalled if they are exported (have an upper case
|
||||
// first letter), and are marshalled using the field name lowercased as the
|
||||
// default key. Custom keys may be defined via the "yaml" name in the field
|
||||
// tag: the content preceding the first comma is used as the key, and the
|
||||
// following comma-separated options are used to tweak the marshalling process.
|
||||
|
@ -125,7 +172,10 @@ func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
|||
//
|
||||
// omitempty Only include the field if it's not set to the zero
|
||||
// value for the type or to empty slices or maps.
|
||||
// Does not apply to zero valued structs.
|
||||
// Zero valued structs will be omitted if all their public
|
||||
// fields are zero, unless they implement an IsZero
|
||||
// method (see the IsZeroer interface type), in which
|
||||
// case the field will be included if that method returns true.
|
||||
//
|
||||
// flow Marshal using a flow style (useful for structs,
|
||||
// sequences and maps).
|
||||
|
@ -140,7 +190,7 @@ func unmarshal(in []byte, out interface{}, strict bool) (err error) {
|
|||
// For example:
|
||||
//
|
||||
// type T struct {
|
||||
// F int "a,omitempty"
|
||||
// F int `yaml:"a,omitempty"`
|
||||
// B int
|
||||
// }
|
||||
// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n"
|
||||
|
@ -150,12 +200,47 @@ func Marshal(in interface{}) (out []byte, err error) {
|
|||
defer handleErr(&err)
|
||||
e := newEncoder()
|
||||
defer e.destroy()
|
||||
e.marshal("", reflect.ValueOf(in))
|
||||
e.marshalDoc("", reflect.ValueOf(in))
|
||||
e.finish()
|
||||
out = e.out
|
||||
return
|
||||
}
|
||||
|
||||
// An Encoder writes YAML values to an output stream.
|
||||
type Encoder struct {
|
||||
encoder *encoder
|
||||
}
|
||||
|
||||
// NewEncoder returns a new encoder that writes to w.
|
||||
// The Encoder should be closed after use to flush all data
|
||||
// to w.
|
||||
func NewEncoder(w io.Writer) *Encoder {
|
||||
return &Encoder{
|
||||
encoder: newEncoderWithWriter(w),
|
||||
}
|
||||
}
|
||||
|
||||
// Encode writes the YAML encoding of v to the stream.
|
||||
// If multiple items are encoded to the stream, the
|
||||
// second and subsequent document will be preceded
|
||||
// with a "---" document separator, but the first will not.
|
||||
//
|
||||
// See the documentation for Marshal for details about the conversion of Go
|
||||
// values to YAML.
|
||||
func (e *Encoder) Encode(v interface{}) (err error) {
|
||||
defer handleErr(&err)
|
||||
e.encoder.marshalDoc("", reflect.ValueOf(v))
|
||||
return nil
|
||||
}
|
||||
|
||||
// Close closes the encoder by writing any remaining data.
|
||||
// It does not write a stream terminating string "...".
|
||||
func (e *Encoder) Close() (err error) {
|
||||
defer handleErr(&err)
|
||||
e.encoder.finish()
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleErr(err *error) {
|
||||
if v := recover(); v != nil {
|
||||
if e, ok := v.(yamlError); ok {
|
||||
|
@ -211,6 +296,9 @@ type fieldInfo struct {
|
|||
Num int
|
||||
OmitEmpty bool
|
||||
Flow bool
|
||||
// Id holds the unique field identifier, so we can cheaply
|
||||
// check for field duplicates without maintaining an extra map.
|
||||
Id int
|
||||
|
||||
// Inline holds the field index if the field is part of an inlined struct.
|
||||
Inline []int
|
||||
|
@ -290,6 +378,7 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
|
|||
} else {
|
||||
finfo.Inline = append([]int{i}, finfo.Inline...)
|
||||
}
|
||||
finfo.Id = len(fieldsList)
|
||||
fieldsMap[finfo.Key] = finfo
|
||||
fieldsList = append(fieldsList, finfo)
|
||||
}
|
||||
|
@ -311,11 +400,16 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
|
|||
return nil, errors.New(msg)
|
||||
}
|
||||
|
||||
info.Id = len(fieldsList)
|
||||
fieldsList = append(fieldsList, info)
|
||||
fieldsMap[info.Key] = info
|
||||
}
|
||||
|
||||
sinfo = &structInfo{fieldsMap, fieldsList, inlineMap}
|
||||
sinfo = &structInfo{
|
||||
FieldsMap: fieldsMap,
|
||||
FieldsList: fieldsList,
|
||||
InlineMap: inlineMap,
|
||||
}
|
||||
|
||||
fieldMapMutex.Lock()
|
||||
structMap[st] = sinfo
|
||||
|
@ -323,8 +417,23 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
|
|||
return sinfo, nil
|
||||
}
|
||||
|
||||
// IsZeroer is used to check whether an object is zero to
|
||||
// determine whether it should be omitted when marshaling
|
||||
// with the omitempty flag. One notable implementation
|
||||
// is time.Time.
|
||||
type IsZeroer interface {
|
||||
IsZero() bool
|
||||
}
|
||||
|
||||
func isZero(v reflect.Value) bool {
|
||||
switch v.Kind() {
|
||||
kind := v.Kind()
|
||||
if z, ok := v.Interface().(IsZeroer); ok {
|
||||
if (kind == reflect.Ptr || kind == reflect.Interface) && v.IsNil() {
|
||||
return true
|
||||
}
|
||||
return z.IsZero()
|
||||
}
|
||||
switch kind {
|
||||
case reflect.String:
|
||||
return len(v.String()) == 0
|
||||
case reflect.Interface, reflect.Ptr:
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package yaml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
|
@ -239,6 +240,27 @@ const (
|
|||
yaml_MAPPING_END_EVENT // A MAPPING-END event.
|
||||
)
|
||||
|
||||
var eventStrings = []string{
|
||||
yaml_NO_EVENT: "none",
|
||||
yaml_STREAM_START_EVENT: "stream start",
|
||||
yaml_STREAM_END_EVENT: "stream end",
|
||||
yaml_DOCUMENT_START_EVENT: "document start",
|
||||
yaml_DOCUMENT_END_EVENT: "document end",
|
||||
yaml_ALIAS_EVENT: "alias",
|
||||
yaml_SCALAR_EVENT: "scalar",
|
||||
yaml_SEQUENCE_START_EVENT: "sequence start",
|
||||
yaml_SEQUENCE_END_EVENT: "sequence end",
|
||||
yaml_MAPPING_START_EVENT: "mapping start",
|
||||
yaml_MAPPING_END_EVENT: "mapping end",
|
||||
}
|
||||
|
||||
func (e yaml_event_type_t) String() string {
|
||||
if e < 0 || int(e) >= len(eventStrings) {
|
||||
return fmt.Sprintf("unknown event %d", e)
|
||||
}
|
||||
return eventStrings[e]
|
||||
}
|
||||
|
||||
// The event structure.
|
||||
type yaml_event_t struct {
|
||||
|
||||
|
@ -521,9 +543,9 @@ type yaml_parser_t struct {
|
|||
|
||||
read_handler yaml_read_handler_t // Read handler.
|
||||
|
||||
input_file io.Reader // File input data.
|
||||
input []byte // String input data.
|
||||
input_pos int
|
||||
input_reader io.Reader // File input data.
|
||||
input []byte // String input data.
|
||||
input_pos int
|
||||
|
||||
eof bool // EOF flag
|
||||
|
||||
|
@ -632,7 +654,7 @@ type yaml_emitter_t struct {
|
|||
write_handler yaml_write_handler_t // Write handler.
|
||||
|
||||
output_buffer *[]byte // String output data.
|
||||
output_file io.Writer // File output data.
|
||||
output_writer io.Writer // File output data.
|
||||
|
||||
buffer []byte // The working buffer.
|
||||
buffer_pos int // The current position of the buffer.
|
||||
|
|
Loading…
Reference in New Issue