mirror of https://github.com/knative/client.git

Update core cli dependencies (#1851)

* Update core cli dependencies
* Update vendor dir

parent 62d2cbffa0
commit cb35f6a25a

go.mod (19 lines changed)
@@ -7,17 +7,17 @@ require (
 	github.com/hashicorp/golang-lru v1.0.2
 	github.com/hashicorp/hcl v1.0.0
 	github.com/mitchellh/go-homedir v1.1.0
-	github.com/spf13/afero v1.9.2 // indirect
-	github.com/spf13/cobra v1.6.0
+	github.com/spf13/afero v1.9.5 // indirect
+	github.com/spf13/cobra v1.7.0
 	github.com/spf13/pflag v1.0.5
-	github.com/spf13/viper v1.13.0
+	github.com/spf13/viper v1.16.0
 	golang.org/x/mod v0.12.0
 	golang.org/x/term v0.11.0
 	gotest.tools/v3 v3.3.0
 	k8s.io/api v0.26.5
 	k8s.io/apiextensions-apiserver v0.26.5
 	k8s.io/apimachinery v0.26.5
-	k8s.io/cli-runtime v0.25.2
+	k8s.io/cli-runtime v0.26.5
 	k8s.io/client-go v0.26.5
 	k8s.io/code-generator v0.26.5
 	knative.dev/client-pkg v0.0.0-20230815131440-5abd12981b4b
@@ -66,12 +66,12 @@ require (
 	github.com/gregjones/httpcache v0.0.0-20190611155906-901d90724c79 // indirect
 	github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3 // indirect
 	github.com/imdario/mergo v0.3.13 // indirect
-	github.com/inconshreveable/mousetrap v1.0.1 // indirect
+	github.com/inconshreveable/mousetrap v1.1.0 // indirect
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/json-iterator/go v1.1.12 // indirect
 	github.com/kelseyhightower/envconfig v1.4.0 // indirect
 	github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de // indirect
-	github.com/magiconair/properties v1.8.6 // indirect
+	github.com/magiconair/properties v1.8.7 // indirect
 	github.com/mailru/easyjson v0.7.7 // indirect
 	github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
 	github.com/mitchellh/mapstructure v1.5.0 // indirect
@@ -81,8 +81,7 @@ require (
 	github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect
 	github.com/opencontainers/go-digest v1.0.0 // indirect
 	github.com/openzipkin/zipkin-go v0.4.1 // indirect
-	github.com/pelletier/go-toml v1.9.5 // indirect
-	github.com/pelletier/go-toml/v2 v2.0.5 // indirect
+	github.com/pelletier/go-toml/v2 v2.0.9 // indirect
 	github.com/peterbourgon/diskv v2.0.1+incompatible // indirect
 	github.com/pkg/errors v0.9.1 // indirect
 	github.com/prometheus/client_golang v1.16.0 // indirect
@@ -95,9 +94,9 @@ require (
 	github.com/robfig/cron/v3 v3.0.1 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
 	github.com/sergi/go-diff v1.2.0 // indirect
-	github.com/spf13/cast v1.5.0 // indirect
+	github.com/spf13/cast v1.5.1 // indirect
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
-	github.com/subosito/gotenv v1.4.1 // indirect
+	github.com/subosito/gotenv v1.6.0 // indirect
 	github.com/xlab/treeprint v1.1.0 // indirect
 	go.opencensus.io v0.24.0 // indirect
 	go.starlark.net v0.0.0-20220817180228-f738f5508c12 // indirect
go.sum (46 lines changed)
@@ -100,7 +100,7 @@ github.com/evanphx/json-patch v5.6.0+incompatible/go.mod h1:50XU6AFN0ol/bzJsmQLi
 github.com/evanphx/json-patch/v5 v5.6.0 h1:b91NhWfaz02IuVxO9faSllyAtNXHMPkC5J8sJCLunww=
 github.com/evanphx/json-patch/v5 v5.6.0/go.mod h1:G79N1coSVB93tBe7j6PhzjmR3/2VvlbKOFpnXhI9Bw4=
 github.com/flowstack/go-jsonschema v0.1.1/go.mod h1:yL7fNggx1o8rm9RlgXv7hTBWxdBM0rVwpMwimd3F3N0=
-github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
+github.com/frankban/quicktest v1.14.4 h1:g2rn0vABPOOXmZUj+vbmUp0lPoXEMuhTpIluN0XL9UY=
 github.com/fsnotify/fsnotify v1.6.0 h1:n+5WquG0fcWoWp6xPWfHdbskMCQaFnG6PfBrh1Ky4HY=
 github.com/fsnotify/fsnotify v1.6.0/go.mod h1:sl3t1tCWJFWoRz9R8WJCbQihKKwmorjAbSClcnxKAGw=
 github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
@@ -232,8 +232,8 @@ github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:
 github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
 github.com/imdario/mergo v0.3.13 h1:lFzP57bqS/wsqKssCGmtLAb8A0wKjLGrve2q3PPVcBk=
 github.com/imdario/mergo v0.3.13/go.mod h1:4lJ1jqUDcsbIECGy0RUJAXNIhg+6ocWgb1ALK2O4oXg=
-github.com/inconshreveable/mousetrap v1.0.1 h1:U3uMjPSQEBMNp1lFxmllqCPM6P5u/Xq7Pgzkat/bFNc=
-github.com/inconshreveable/mousetrap v1.0.1/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
+github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8=
+github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw=
 github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
 github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY=
 github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y=
@@ -263,8 +263,8 @@ github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
 github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
 github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de h1:9TO3cAIGXtEhnIaL+V+BEER86oLrvS+kWobKpbJuye0=
 github.com/liggitt/tabwriter v0.0.0-20181228230101-89fcab3d43de/go.mod h1:zAbeS9B/r2mtpb6U+EI2rYA5OAXxsYw6wTamcNW+zcE=
-github.com/magiconair/properties v1.8.6 h1:5ibWZ6iY0NctNGWo87LalDlEZ6R41TqbbDamhfG/Qzo=
-github.com/magiconair/properties v1.8.6/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
+github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY=
+github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0=
 github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
 github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
 github.com/mailru/easyjson v0.7.7 h1:UGYAvKxe3sBsEDzO8ZeWOSlIQfWFlxbzLZe7hwFURr0=
@@ -296,10 +296,8 @@ github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8
 github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM=
 github.com/openzipkin/zipkin-go v0.4.1 h1:kNd/ST2yLLWhaWrkgchya40TJabe8Hioj9udfPcEO5A=
 github.com/openzipkin/zipkin-go v0.4.1/go.mod h1:qY0VqDSN1pOBN94dBc6w2GJlWLiovAyg7Qt6/I9HecM=
-github.com/pelletier/go-toml v1.9.5 h1:4yBQzkHv+7BHq2PQUZF3Mx0IYxG7LsP222s7Agd3ve8=
-github.com/pelletier/go-toml v1.9.5/go.mod h1:u1nR/EPcESfeI/szUZKdtJ0xRNbUoANCkoOuaOx1Y+c=
-github.com/pelletier/go-toml/v2 v2.0.5 h1:ipoSadvV8oGUjnUbMub59IDPPwfxF694nG/jwbMiyQg=
-github.com/pelletier/go-toml/v2 v2.0.5/go.mod h1:OMHamSCAODeSsVrwwvcJOaoN0LIUIaFVNZzmWyNfXas=
+github.com/pelletier/go-toml/v2 v2.0.9 h1:uH2qQXheeefCCkuBBSLi7jCiSmj3VRh2+Goq2N7Xxu0=
+github.com/pelletier/go-toml/v2 v2.0.9/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
 github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=
 github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
 github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -360,19 +358,19 @@ github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPx
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88=
 github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
-github.com/spf13/afero v1.9.2 h1:j49Hj62F0n+DaZ1dDCvhABaPNSGNkt32oRFxI33IEMw=
-github.com/spf13/afero v1.9.2/go.mod h1:iUV7ddyEEZPO5gA3zD4fJt6iStLlL+Lg4m2cihcDf8Y=
-github.com/spf13/cast v1.5.0 h1:rj3WzYc11XZaIZMPKmwP96zkFEnnAmV8s6XbB2aY32w=
-github.com/spf13/cast v1.5.0/go.mod h1:SpXXQ5YoyJw6s3/6cMTQuxvgRl3PCJiyaX9p6b155UU=
-github.com/spf13/cobra v1.6.0 h1:42a0n6jwCot1pUmomAp4T7DeMD+20LFv4Q54pxLf2LI=
-github.com/spf13/cobra v1.6.0/go.mod h1:IOw/AERYS7UzyrGinqmz6HLUo219MORXGxhbaJUqzrY=
+github.com/spf13/afero v1.9.5 h1:stMpOSZFs//0Lv29HduCmli3GUfpFoF3Y1Q/aXj/wVM=
+github.com/spf13/afero v1.9.5/go.mod h1:UBogFpq8E9Hx+xc5CNTTEpTnuHVmXDwZcZcE1eb/UhQ=
+github.com/spf13/cast v1.5.1 h1:R+kOtfhWQE6TVQzY+4D7wJLBgkdVasCEFxSUBYBYIlA=
+github.com/spf13/cast v1.5.1/go.mod h1:b9PdjNptOpzXr7Rq1q9gJML/2cdGQAo69NKzQ10KN48=
+github.com/spf13/cobra v1.7.0 h1:hyqWnYt1ZQShIddO5kBpj3vu05/++x6tJ6dg8EC572I=
+github.com/spf13/cobra v1.7.0/go.mod h1:uLxZILRyS/50WlhOIKD7W6V5bgeIt+4sICxh6uRMrb0=
 github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
 github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
 github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
-github.com/spf13/viper v1.13.0 h1:BWSJ/M+f+3nmdz9bxB+bWX28kkALN2ok11D0rSo8EJU=
-github.com/spf13/viper v1.13.0/go.mod h1:Icm2xNL3/8uyh/wFuB1jI7TiTNKp8632Nwegu+zgdYw=
+github.com/spf13/viper v1.16.0 h1:rGGH0XDZhdUOryiDWjmIvUSWpbNqisK8Wk0Vyefw8hc=
+github.com/spf13/viper v1.16.0/go.mod h1:yg78JgCJcbrQOvV9YLXgkLaZqUidkY9K+Dd1FofRzQg=
 github.com/stoewer/go-strcase v1.2.0/go.mod h1:IBiWB2sKIp3wVVQ3Y035++gc+knqhUQag1KpM8ahLw8=
 github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
 github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
@@ -387,11 +385,12 @@ github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/
 github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
 github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
-github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
-github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
+github.com/stretchr/testify v1.8.4 h1:CcVxjf3Q8PM0mHUKJCdn+eZZtm5yQwehR5yeSVQQcUk=
+github.com/stretchr/testify v1.8.4/go.mod h1:sz/lmYIOXD/1dqDmKjjqLyZ2RngseejIcXlSw2iwfAo=
 github.com/stvp/go-udp-testing v0.0.0-20201019212854-469649b16807/go.mod h1:7jxmlfBCDBXRzr0eAQJ48XC1hBu1np4CS5+cHEYfwpc=
-github.com/subosito/gotenv v1.4.1 h1:jyEFiXpy21Wm81FBN71l9VoMMV8H8jG+qIK3GCpY6Qs=
-github.com/subosito/gotenv v1.4.1/go.mod h1:ayKnFf/c6rvx/2iiLrJUk1e6plDbT3edrFNGqEflhK0=
+github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8=
+github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU=
 github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
 github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU=
 github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ=
@@ -431,7 +430,7 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
 golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
 golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
-golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
+golang.org/x/crypto v0.0.0-20211108221036-ceb1ce70b4fa/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
 golang.org/x/crypto v0.0.0-20220722155217-630584e8d5aa/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
 golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -505,6 +504,7 @@ golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v
 golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
 golang.org/x/net v0.0.0-20210525063256-abc453219eb5/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
+golang.org/x/net v0.0.0-20210805182204-aaa1db679c0d/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
 golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
 golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk=
 golang.org/x/net v0.14.0 h1:BONx9s002vGdD9umnlX1Po8vOZmrgH34qlHcD1MfK14=
@@ -818,8 +818,8 @@ k8s.io/apiextensions-apiserver v0.26.5 h1:VJ946z9RjyCPn3qiz4Kus/UYjCRrdn1xUvEsJF
 k8s.io/apiextensions-apiserver v0.26.5/go.mod h1:Olsde7ZNWnyz9rsL13iXYXmL1h7kWujtKeC3yWVCDPo=
 k8s.io/apimachinery v0.26.5 h1:hTQVhJao2piX7vSgCn4Lwd6E0o/+TJIH4NqRf+q4EmE=
 k8s.io/apimachinery v0.26.5/go.mod h1:HUvk6wrOP4v22AIYqeCGSQ6xWCHo41J9d6psb3temAg=
-k8s.io/cli-runtime v0.25.2 h1:XOx+SKRjBpYMLY/J292BHTkmyDffl/qOx3YSuFZkTuc=
-k8s.io/cli-runtime v0.25.2/go.mod h1:OQx3+/0st6x5YpkkJQlEWLC73V0wHsOFMC1/roxV8Oc=
+k8s.io/cli-runtime v0.26.5 h1:1YTQt6cWaiyA+6NptNMVqkGkh+BFN9cG+PESgz24//U=
+k8s.io/cli-runtime v0.26.5/go.mod h1:iZMA8+AVNSownXlJ1h64s59C5/oHSA6hGBarfHjRDl8=
 k8s.io/client-go v0.26.5 h1:e8Z44pafL/c6ayF/6qYEypbJoDSakaFxhJ9lqULEJEo=
 k8s.io/client-go v0.26.5/go.mod h1:/CYyNt+ZLMvWqMF8h1SvkUXz2ujFWQLwdDrdiQlZ5X0=
 k8s.io/code-generator v0.26.5 h1:0p350mqxkbs29h8/yF4AMilApLVUhnRx3EAfhTWR5fY=
vendor/github.com/inconshreveable/mousetrap/trap_others.go
@@ -1,3 +1,4 @@
+//go:build !windows
 // +build !windows

 package mousetrap
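The added //go:build line is the build-constraint syntax introduced in Go 1.17; the legacy // +build line stays so older toolchains still apply the same constraint. As a minimal illustration (not taken from this diff), a file restricted to Windows would carry the mirrored pair:

//go:build windows
// +build windows

package mousetrap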
vendor/github.com/inconshreveable/mousetrap/trap_windows.go
@@ -1,79 +1,30 @@
-// +build windows
-// +build !go1.4
-
 package mousetrap

 import (
-	"fmt"
-	"os"
 	"syscall"
 	"unsafe"
 )

-const (
-	// defined by the Win32 API
-	th32cs_snapprocess uintptr = 0x2
-)
-
-var (
-	kernel                   = syscall.MustLoadDLL("kernel32.dll")
-	CreateToolhelp32Snapshot = kernel.MustFindProc("CreateToolhelp32Snapshot")
-	Process32First           = kernel.MustFindProc("Process32FirstW")
-	Process32Next            = kernel.MustFindProc("Process32NextW")
-)
-
-// ProcessEntry32 structure defined by the Win32 API
-type processEntry32 struct {
-	dwSize              uint32
-	cntUsage            uint32
-	th32ProcessID       uint32
-	th32DefaultHeapID   int
-	th32ModuleID        uint32
-	cntThreads          uint32
-	th32ParentProcessID uint32
-	pcPriClassBase      int32
-	dwFlags             uint32
-	szExeFile           [syscall.MAX_PATH]uint16
-}
-
-func getProcessEntry(pid int) (pe *processEntry32, err error) {
-	snapshot, _, e1 := CreateToolhelp32Snapshot.Call(th32cs_snapprocess, uintptr(0))
-	if snapshot == uintptr(syscall.InvalidHandle) {
-		err = fmt.Errorf("CreateToolhelp32Snapshot: %v", e1)
-		return
-	}
-	defer syscall.CloseHandle(syscall.Handle(snapshot))
-
-	var processEntry processEntry32
-	processEntry.dwSize = uint32(unsafe.Sizeof(processEntry))
-	ok, _, e1 := Process32First.Call(snapshot, uintptr(unsafe.Pointer(&processEntry)))
-	if ok == 0 {
-		err = fmt.Errorf("Process32First: %v", e1)
-		return
-	}
-
-	for {
-		if processEntry.th32ProcessID == uint32(pid) {
-			pe = &processEntry
-			return
-		}
-
-		ok, _, e1 = Process32Next.Call(snapshot, uintptr(unsafe.Pointer(&processEntry)))
-		if ok == 0 {
-			err = fmt.Errorf("Process32Next: %v", e1)
-			return
-		}
-	}
-}
-
-func getppid() (pid int, err error) {
-	pe, err := getProcessEntry(os.Getpid())
-	if err != nil {
-		return
-	}
-
-	pid = int(pe.th32ParentProcessID)
-	return
+func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) {
+	snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0)
+	if err != nil {
+		return nil, err
+	}
+	defer syscall.CloseHandle(snapshot)
+	var procEntry syscall.ProcessEntry32
+	procEntry.Size = uint32(unsafe.Sizeof(procEntry))
+	if err = syscall.Process32First(snapshot, &procEntry); err != nil {
+		return nil, err
+	}
+	for {
+		if procEntry.ProcessID == uint32(pid) {
+			return &procEntry, nil
+		}
+		err = syscall.Process32Next(snapshot, &procEntry)
+		if err != nil {
+			return nil, err
+		}
+	}
 }

 // StartedByExplorer returns true if the program was invoked by the user double-clicking
@@ -83,16 +34,9 @@ func getppid() (pid int, err error) {
 // It does not guarantee that the program was run from a terminal. It only can tell you
 // whether it was launched from explorer.exe
 func StartedByExplorer() bool {
-	ppid, err := getppid()
+	pe, err := getProcessEntry(syscall.Getppid())
 	if err != nil {
 		return false
 	}
-
-	pe, err := getProcessEntry(ppid)
-	if err != nil {
-		return false
-	}
-
-	name := syscall.UTF16ToString(pe.szExeFile[:])
-	return name == "explorer.exe"
+	return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:])
 }
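mousetrap's purpose is to let a console program detect that it was launched by double-clicking in Explorer (by inspecting the parent process name) instead of from a shell. A minimal sketch of how a CLI might consume the API above — the message text and exit code are illustrative, not taken from this repository:

package main

import (
	"fmt"
	"os"
	"time"

	"github.com/inconshreveable/mousetrap"
)

func main() {
	// A double-clicked console binary on Windows opens a console window that
	// closes as soon as main returns; pause so the user can read the hint.
	if mousetrap.StartedByExplorer() {
		fmt.Println("This is a command-line tool; please run it from a terminal.")
		time.Sleep(5 * time.Second)
		os.Exit(1)
	}
	// ... normal CLI startup ...
}

This is roughly what spf13/cobra does for root commands on Windows, which is why the cobra bump in go.mod also pulls in the newer mousetrap.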
vendor/github.com/inconshreveable/mousetrap/trap_windows_1.4.go (deleted)
@@ -1,46 +0,0 @@
-// +build windows
-// +build go1.4
-
-package mousetrap
-
-import (
-	"os"
-	"syscall"
-	"unsafe"
-)
-
-func getProcessEntry(pid int) (*syscall.ProcessEntry32, error) {
-	snapshot, err := syscall.CreateToolhelp32Snapshot(syscall.TH32CS_SNAPPROCESS, 0)
-	if err != nil {
-		return nil, err
-	}
-	defer syscall.CloseHandle(snapshot)
-	var procEntry syscall.ProcessEntry32
-	procEntry.Size = uint32(unsafe.Sizeof(procEntry))
-	if err = syscall.Process32First(snapshot, &procEntry); err != nil {
-		return nil, err
-	}
-	for {
-		if procEntry.ProcessID == uint32(pid) {
-			return &procEntry, nil
-		}
-		err = syscall.Process32Next(snapshot, &procEntry)
-		if err != nil {
-			return nil, err
-		}
-	}
-}
-
-// StartedByExplorer returns true if the program was invoked by the user double-clicking
-// on the executable from explorer.exe
-//
-// It is conservative and returns false if any of the internal calls fail.
-// It does not guarantee that the program was run from a terminal. It only can tell you
-// whether it was launched from explorer.exe
-func StartedByExplorer() bool {
-	pe, err := getProcessEntry(os.Getppid())
-	if err != nil {
-		return false
-	}
-	return "explorer.exe" == syscall.UTF16ToString(pe.ExeFile[:])
-}
vendor/github.com/magiconair/properties/.travis.yml (deleted)
@@ -1,17 +0,0 @@
-language: go
-go:
-  - 1.3.x
-  - 1.4.x
-  - 1.5.x
-  - 1.6.x
-  - 1.7.x
-  - 1.8.x
-  - 1.9.x
-  - "1.10.x"
-  - "1.11.x"
-  - "1.12.x"
-  - "1.13.x"
-  - "1.14.x"
-  - "1.15.x"
-  - "1.16.x"
-  - tip
vendor/github.com/magiconair/properties/CHANGELOG.md
@@ -1,5 +1,50 @@
## Changelog

### [1.8.7](https://github.com/magiconair/properties/tree/v1.8.7) - 08 Dec 2022

 * [PR #65](https://github.com/magiconair/properties/pull/65): Speedup Merge

   Thanks to [@AdityaVallabh](https://github.com/AdityaVallabh) for the patch.

 * [PR #66](https://github.com/magiconair/properties/pull/66): use github actions

### [1.8.6](https://github.com/magiconair/properties/tree/v1.8.6) - 23 Feb 2022

 * [PR #57](https://github.com/magiconair/properties/pull/57):Fix "unreachable code" lint error

   Thanks to [@ellie](https://github.com/ellie) for the patch.

 * [PR #63](https://github.com/magiconair/properties/pull/63): Make TestMustGetParsedDuration backwards compatible

   This patch ensures that the `TestMustGetParsedDuration` still works with `go1.3` to make the
   author happy until it affects real users.

   Thanks to [@maage](https://github.com/maage) for the patch.

### [1.8.5](https://github.com/magiconair/properties/tree/v1.8.5) - 24 Mar 2021

 * [PR #55](https://github.com/magiconair/properties/pull/55): Fix: Encoding Bug in Comments

   When reading comments \ are loaded correctly, but when writing they are then
   replaced by \\. This leads to wrong comments when writing and reading multiple times.

   Thanks to [@doxsch](https://github.com/doxsch) for the patch.

### [1.8.4](https://github.com/magiconair/properties/tree/v1.8.4) - 23 Sep 2020

 * [PR #50](https://github.com/magiconair/properties/pull/50): enhance error message for circular references

   Thanks to [@sriv](https://github.com/sriv) for the patch.

### [1.8.3](https://github.com/magiconair/properties/tree/v1.8.3) - 14 Sep 2020

 * [PR #49](https://github.com/magiconair/properties/pull/49): Include the key in error message causing the circular reference

   The change is include the key in the error message which is causing the circular
   reference when parsing/loading the properties files.

   Thanks to [@haroon-sheikh](https://github.com/haroon-sheikh) for the patch.

### [1.8.2](https://github.com/magiconair/properties/tree/v1.8.2) - 25 Aug 2020

 * [PR #36](https://github.com/magiconair/properties/pull/36): Escape backslash on write
vendor/github.com/magiconair/properties/decode.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
@@ -48,49 +48,49 @@ import (
//
// Examples:
//
//	// Field is ignored.
//	Field int `properties:"-"`
//
//	// Field is assigned value of 'Field'.
//	Field int
//
//	// Field is assigned value of 'myName'.
//	Field int `properties:"myName"`
//
//	// Field is assigned value of key 'myName' and has a default
//	// value 15 if the key does not exist.
//	Field int `properties:"myName,default=15"`
//
//	// Field is assigned value of key 'Field' and has a default
//	// value 15 if the key does not exist.
//	Field int `properties:",default=15"`
//
//	// Field is assigned value of key 'date' and the date
//	// is in format 2006-01-02
//	Field time.Time `properties:"date,layout=2006-01-02"`
//
//	// Field is assigned the non-empty and whitespace trimmed
//	// values of key 'Field' split by commas.
//	Field []string
//
//	// Field is assigned the non-empty and whitespace trimmed
//	// values of key 'Field' split by commas and has a default
//	// value ["a", "b", "c"] if the key does not exist.
//	Field []string `properties:",default=a;b;c"`
//
//	// Field is decoded recursively with "Field." as key prefix.
//	Field SomeStruct
//
//	// Field is decoded recursively with "myName." as key prefix.
//	Field SomeStruct `properties:"myName"`
//
//	// Field is decoded recursively with "Field." as key prefix
//	// and the next dotted element of the key as map key.
//	Field map[string]string
//
//	// Field is decoded recursively with "myName." as key prefix
//	// and the next dotted element of the key as map key.
//	Field map[string]string `properties:"myName"`
func (p *Properties) Decode(x interface{}) error {
	t, v := reflect.TypeOf(x), reflect.ValueOf(x)
	if t.Kind() != reflect.Ptr || v.Elem().Type().Kind() != reflect.Struct {
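The struct-tag behavior documented above can be exercised with a small, self-contained sketch; the key names and defaults below are made up for illustration and are not part of the diff:

package main

import (
	"fmt"
	"time"

	"github.com/magiconair/properties"
)

type config struct {
	Host    string        `properties:"host,default=localhost"`
	Port    int           `properties:"port,default=8080"`
	Timeout time.Duration `properties:"timeout,default=5s"`
	Tags    []string      `properties:"tags,default=a;b;c"`
}

func main() {
	p := properties.MustLoadString("host = example.org\nport = 9000\n")

	var c config
	if err := p.Decode(&c); err != nil {
		panic(err)
	}
	// host and port come from the input; timeout and tags fall back to the tag defaults.
	fmt.Printf("%+v\n", c)
}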
vendor/github.com/magiconair/properties/doc.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
@@ -13,7 +13,7 @@
//
// To load a single properties file use MustLoadFile():
//
//	p := properties.MustLoadFile(filename, properties.UTF8)
//
// To load multiple properties files use MustLoadFiles()
// which loads the files in the given order and merges the
@@ -23,25 +23,25 @@
// Filenames can contain environment variables which are expanded
// before loading.
//
//	f1 := "/etc/myapp/myapp.conf"
//	f2 := "/home/${USER}/myapp.conf"
//	p := MustLoadFiles([]string{f1, f2}, properties.UTF8, true)
//
// All of the different key/value delimiters ' ', ':' and '=' are
// supported as well as the comment characters '!' and '#' and
// multi-line values.
//
//	! this is a comment
//	# and so is this
//
//	# the following expressions are equal
//	key value
//	key=value
//	key:value
//	key = value
//	key : value
//	key = val\
//	      ue
//
// Properties stores all comments preceding a key and provides
// GetComments() and SetComments() methods to retrieve and
@@ -55,62 +55,62 @@
// and malformed expressions are not allowed and cause an
// error. Expansion of environment variables is supported.
//
//	# standard property
//	key = value
//
//	# property expansion: key2 = value
//	key2 = ${key}
//
//	# recursive expansion: key3 = value
//	key3 = ${key2}
//
//	# circular reference (error)
//	key = ${key}
//
//	# malformed expression (error)
//	key = ${ke
//
//	# refers to the users' home dir
//	home = ${HOME}
//
//	# local key takes precedence over env var: u = foo
//	USER = foo
//	u = ${USER}
//
// The default property expansion format is ${key} but can be
// changed by setting different pre- and postfix values on the
// Properties object.
//
//	p := properties.NewProperties()
//	p.Prefix = "#["
//	p.Postfix = "]#"
//
// Properties provides convenience functions for getting typed
// values with default values if the key does not exist or the
// type conversion failed.
//
//	# Returns true if the value is either "1", "on", "yes" or "true"
//	# Returns false for every other value and the default value if
//	# the key does not exist.
//	v = p.GetBool("key", false)
//
//	# Returns the value if the key exists and the format conversion
//	# was successful. Otherwise, the default value is returned.
//	v = p.GetInt64("key", 999)
//	v = p.GetUint64("key", 999)
//	v = p.GetFloat64("key", 123.0)
//	v = p.GetString("key", "def")
//	v = p.GetDuration("key", 999)
//
// As an alternative properties may be applied with the standard
// library's flag implementation at any time.
//
//	# Standard configuration
//	v = flag.Int("key", 999, "help message")
//	flag.Parse()
//
//	# Merge p into the flag set
//	p.MustFlag(flag.CommandLine)
//
// Properties provides several MustXXX() convenience functions
// which will terminate the app if an error occurs. The behavior
@@ -119,30 +119,30 @@
// of logging the error set a different ErrorHandler before
// you use the Properties package.
//
//	properties.ErrorHandler = properties.PanicHandler
//
//	# Will panic instead of logging an error
//	p := properties.MustLoadFile("config.properties")
//
// You can also provide your own ErrorHandler function. The only requirement
// is that the error handler function must exit after handling the error.
//
//	properties.ErrorHandler = func(err error) {
//		fmt.Println(err)
//		os.Exit(1)
//	}
//
//	# Will write to stdout and then exit
//	p := properties.MustLoadFile("config.properties")
//
// Properties can also be loaded into a struct via the `Decode`
// method, e.g.
//
//	type S struct {
//		A string        `properties:"a,default=foo"`
//		D time.Duration `properties:"timeout,default=5s"`
//		E time.Time     `properties:"expires,layout=2006-01-02,default=2015-01-01"`
//	}
//
// See `Decode()` method for the full documentation.
//
@@ -152,5 +152,4 @@
// http://en.wikipedia.org/wiki/.properties
//
// http://docs.oracle.com/javase/7/docs/api/java/util/Properties.html#load%28java.io.Reader%29
//
package properties
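A short sketch of the loader and typed-getter API described in this package comment; the file name and keys are assumptions for illustration only:

package main

import (
	"fmt"

	"github.com/magiconair/properties"
)

func main() {
	// ${HOME}-style references in the filename are expanded before loading.
	p := properties.MustLoadFile("${HOME}/myapp.conf", properties.UTF8)

	host := p.GetString("host", "localhost") // default is used if the key is missing
	port := p.GetInt("port", 8080)
	debug := p.GetBool("debug", false)

	fmt.Println(host, port, debug)
}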
vendor/github.com/magiconair/properties/integrate.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
@@ -10,8 +10,9 @@ import "flag"
 // the respective key for flag.Flag.Name.
 //
 // It's use is recommended with command line arguments as in:
-// flag.Parse()
-// p.MustFlag(flag.CommandLine)
+//
+//	flag.Parse()
+//	p.MustFlag(flag.CommandLine)
 func (p *Properties) MustFlag(dst *flag.FlagSet) {
 	m := make(map[string]*flag.Flag)
 	dst.VisitAll(func(f *flag.Flag) {
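A hedged sketch of the flag integration described above, assuming an app.properties file exists next to the binary:

package main

import (
	"flag"
	"fmt"

	"github.com/magiconair/properties"
)

func main() {
	addr := flag.String("addr", ":8080", "listen address")
	flag.Parse()

	// Flags that were not set on the command line are filled in from the
	// properties file; explicitly passed flags keep their values.
	p := properties.MustLoadFile("app.properties", properties.UTF8)
	p.MustFlag(flag.CommandLine)

	fmt.Println("listening on", *addr)
}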
vendor/github.com/magiconair/properties/lex.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
 //
vendor/github.com/magiconair/properties/load.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
vendor/github.com/magiconair/properties/parser.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
vendor/github.com/magiconair/properties/properties.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
@@ -700,22 +700,17 @@ func (p *Properties) Delete(key string) {
 
 // Merge merges properties, comments and keys from other *Properties into p
 func (p *Properties) Merge(other *Properties) {
-	for _, k := range other.k {
-		if _, ok := p.m[k]; !ok {
-			p.k = append(p.k, k)
-		}
-	}
 	for k, v := range other.m {
 		p.m[k] = v
 	}
 	for k, v := range other.c {
 		p.c[k] = v
 	}
+
+outer:
+	for _, otherKey := range other.k {
+		for _, key := range p.k {
+			if otherKey == key {
+				continue outer
+			}
+		}
+		p.k = append(p.k, otherKey)
+	}
 }
 
 // ----------------------------------------------------------------------------
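The rewritten Merge still overwrites values and comments and appends keys that p does not yet have; the key deduplication now scans the key slice directly (the labelled outer loop) instead of probing the map before it is updated. A small usage sketch with illustrative values:

package main

import (
	"fmt"

	"github.com/magiconair/properties"
)

func main() {
	base := properties.MustLoadString("a = 1\nb = 2\n")
	override := properties.MustLoadString("b = 3\nc = 4\n")

	// b is overwritten, c is appended after a and b, a keeps its value.
	base.Merge(override)

	fmt.Println(base.MustGet("a"), base.MustGet("b"), base.MustGet("c")) // 1 3 4
}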
vendor/github.com/magiconair/properties/rangecheck.go
@@ -1,4 +1,4 @@
-// Copyright 2018 Frank Schroeder. All rights reserved.
+// Copyright 2013-2022 Frank Schroeder. All rights reserved.
 // Use of this source code is governed by a BSD-style
 // license that can be found in the LICENSE file.
vendor/github.com/pelletier/go-toml/.dockerignore (deleted)
@@ -1,2 +0,0 @@
-cmd/tomll/tomll
-cmd/tomljson/tomljson
vendor/github.com/pelletier/go-toml/.gitignore (deleted)
@@ -1,5 +0,0 @@
-test_program/test_program_bin
-fuzz/
-cmd/tomll/tomll
-cmd/tomljson/tomljson
-cmd/tomltestgen/tomltestgen
vendor/github.com/pelletier/go-toml/CONTRIBUTING.md (deleted)
@@ -1,132 +0,0 @@
(The vendored go-toml v1 contributing guide is removed along with the rest of the
go-toml v1 tree. It covered: asking questions on the issue tracker, improving the
documentation, reporting bugs, the rules for code changes, the fork/pull-request
workflow, running tests with `go test ./...`, the `go fmt` style requirement, and a
maintainers-only checklist for merging pull requests and cutting releases, followed
by its reference links to the go-toml issue tracker, GitHub help pages, and release
pages.)
vendor/github.com/pelletier/go-toml/Dockerfile (deleted)
@@ -1,11 +0,0 @@
-FROM golang:1.12-alpine3.9 as builder
-WORKDIR /go/src/github.com/pelletier/go-toml
-COPY . .
-ENV CGO_ENABLED=0
-ENV GOOS=linux
-RUN go install ./...
-
-FROM scratch
-COPY --from=builder /go/bin/tomll /usr/bin/tomll
-COPY --from=builder /go/bin/tomljson /usr/bin/tomljson
-COPY --from=builder /go/bin/jsontoml /usr/bin/jsontoml
vendor/github.com/pelletier/go-toml/LICENSE (deleted)
@@ -1,247 +0,0 @@
(The vendored go-toml v1 license file is removed with the rest of the go-toml v1 tree.
It contained the MIT license for github.com/pelletier/go-toml, Copyright (c) 2013 - 2021
Thomas Pelletier, Eric Anderton, plus the full Apache License 2.0 text covering
localtime.go and localtime_test.go, which were copied from Google's civil library at
revision ed46f5086358513cf8c25f8e3f022cb838a49d66 and renamed/adapted for this package.)
@ -1,29 +0,0 @@
export CGO_ENABLED=0
go := go
go.goos ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f1)
go.goarch ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f2)

out.tools := tomll tomljson jsontoml
out.dist := $(out.tools:=_$(go.goos)_$(go.goarch).tar.xz)
sources := $(wildcard **/*.go)


.PHONY:
tools: $(out.tools)

$(out.tools): $(sources)
	GOOS=$(go.goos) GOARCH=$(go.goarch) $(go) build ./cmd/$@

.PHONY:
dist: $(out.dist)

$(out.dist):%_$(go.goos)_$(go.goarch).tar.xz: %
	if [ "$(go.goos)" = "windows" ]; then \
		tar -cJf $@ $^.exe; \
	else \
		tar -cJf $@ $^; \
	fi

.PHONY:
clean:
	rm -rf $(out.tools) $(out.dist)
@ -1,5 +0,0 @@
**Issue:** add link to pelletier/go-toml issue here

Explanation of what this pull request does.

More detailed description of the decisions being made and the reasons why (if the patch is non-trivial).
@ -1,176 +0,0 @@
# go-toml

Go library for the [TOML](https://toml.io/) format.

This library supports TOML version
[v1.0.0-rc.3](https://toml.io/en/v1.0.0-rc.3)

[](https://pkg.go.dev/github.com/pelletier/go-toml)
[](https://github.com/pelletier/go-toml/blob/master/LICENSE)
[](https://dev.azure.com/pelletierthomas/go-toml-ci/_build/latest?definitionId=1&branchName=master)
[](https://codecov.io/gh/pelletier/go-toml)
[](https://goreportcard.com/report/github.com/pelletier/go-toml)
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield)

## Development status

**ℹ️ Consider go-toml v2!**

The next version of go-toml is in [active development][v2-dev], and
[nearing completion][v2-map].

Though technically in beta, v2 is already better tested, [fixes bugs][v1-bugs],
and is [much faster][v2-bench]. If you only need to read and write TOML documents
(the majority of cases), those features are implemented and the API is unlikely
to change.

The remaining features will be added shortly. While pull requests are welcome on
v1, no active development is expected on it. When v2.0.0 is released, v1 will be
deprecated.

👉 [go-toml v2][v2]

[v2]: https://github.com/pelletier/go-toml/tree/v2
[v2-map]: https://github.com/pelletier/go-toml/discussions/506
[v2-dev]: https://github.com/pelletier/go-toml/tree/v2
[v1-bugs]: https://github.com/pelletier/go-toml/issues?q=is%3Aissue+is%3Aopen+label%3Av2-fixed
[v2-bench]: https://github.com/pelletier/go-toml/tree/v2#benchmarks

## Features

Go-toml provides the following features for using data parsed from TOML documents:

* Load TOML documents from files and string data
* Easily navigate the TOML structure using Tree
* Marshaling and unmarshaling to and from data structures
* Line & column position data for all parsed elements
* [Query support similar to JSON-Path](query/)
* Syntax errors contain line and column numbers

## Import

```go
import "github.com/pelletier/go-toml"
```

## Usage example

Read a TOML document:

```go
config, _ := toml.Load(`
[postgres]
user = "pelletier"
password = "mypassword"`)
// retrieve data directly
user := config.Get("postgres.user").(string)

// or using an intermediate object
postgresConfig := config.Get("postgres").(*toml.Tree)
password := postgresConfig.Get("password").(string)
```

Or use Unmarshal:

```go
type Postgres struct {
	User     string
	Password string
}
type Config struct {
	Postgres Postgres
}

doc := []byte(`
[Postgres]
User = "pelletier"
Password = "mypassword"`)

config := Config{}
toml.Unmarshal(doc, &config)
fmt.Println("user=", config.Postgres.User)
```
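Marshaling back to TOML works the same way. A minimal sketch, added here for illustration and reusing the `Config` and `Postgres` types from the example above:

```go
config := Config{Postgres: Postgres{User: "pelletier", Password: "mypassword"}}

// Marshal serializes the struct back into a TOML document.
out, err := toml.Marshal(config)
if err != nil {
	panic(err)
}
// out now contains a [Postgres] table with User and Password keys.
fmt.Println(string(out))
```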

Or use a query:

```go
// use a query to gather elements without walking the tree
q, _ := query.Compile("$..[user,password]")
results := q.Execute(config)
for ii, item := range results.Values() {
	fmt.Printf("Query result %d: %v\n", ii, item)
}
```
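The query example above relies on the separate query subpackage linked in the Features list; a sketch of the imports it assumes:

```go
import (
	"github.com/pelletier/go-toml"
	"github.com/pelletier/go-toml/query"
)
```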

## Documentation

The documentation and additional examples are available at
[pkg.go.dev](https://pkg.go.dev/github.com/pelletier/go-toml).

## Tools

Go-toml provides three handy command line tools:

* `tomll`: Reads TOML files and lints them.

  ```
  go install github.com/pelletier/go-toml/cmd/tomll
  tomll --help
  ```
* `tomljson`: Reads a TOML file and outputs its JSON representation.

  ```
  go install github.com/pelletier/go-toml/cmd/tomljson
  tomljson --help
  ```
* `jsontoml`: Reads a JSON file and outputs a TOML representation.

  ```
  go install github.com/pelletier/go-toml/cmd/jsontoml
  jsontoml --help
  ```

### Docker image

Those tools are also available as a Docker image from
[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to
use `tomljson`:

```
docker run -v $PWD:/workdir pelletier/go-toml tomljson /workdir/example.toml
```

Only master (`latest`) and tagged versions are published to dockerhub. You
can build your own image as usual:

```
docker build -t go-toml .
```

## Contribute

Feel free to report bugs and submit patches using GitHub's issues and pull
requests on [pelletier/go-toml](https://github.com/pelletier/go-toml). Any
feedback would be much appreciated!

### Run tests

`go test ./...`

### Fuzzing

The script `./fuzz.sh` is available to
run [go-fuzz](https://github.com/dvyukov/go-fuzz) on go-toml.

## Versioning

Go-toml follows [Semantic Versioning](http://semver.org/). The supported version
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
this document. The last two major versions of Go are supported
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).

## License

The MIT License (MIT) + Apache 2.0. Read [LICENSE](LICENSE).
@ -1,19 +0,0 @@
# Security Policy

## Supported Versions

The table below shows which versions of this project are currently being
supported with security updates.

| Version    | Supported          |
| ---------- | ------------------ |
| Latest 2.x | :white_check_mark: |
| All 1.x    | :x:                |
| All 0.x    | :x:                |

## Reporting a Vulnerability

Email a vulnerability report to `security@pelletier.codes`. Make sure to include
as many details as possible to reproduce the vulnerability. This is a
side-project: I will try to get back to you as quickly as possible, time
permitting in my personal life. Providing a working patch helps very much!
@ -1,188 +0,0 @@
|
|||
trigger:
|
||||
- master
|
||||
|
||||
stages:
|
||||
- stage: run_checks
|
||||
displayName: "Check"
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- job: fmt
|
||||
displayName: "fmt"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.16"
|
||||
inputs:
|
||||
version: "1.16"
|
||||
- task: Go@0
|
||||
displayName: "go fmt ./..."
|
||||
inputs:
|
||||
command: 'custom'
|
||||
customCommand: 'fmt'
|
||||
arguments: './...'
|
||||
- job: coverage
|
||||
displayName: "coverage"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.16"
|
||||
inputs:
|
||||
version: "1.16"
|
||||
- task: Go@0
|
||||
displayName: "Generate coverage"
|
||||
inputs:
|
||||
command: 'test'
|
||||
arguments: "-race -coverprofile=coverage.txt -covermode=atomic"
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: 'bash <(curl -s https://codecov.io/bash) -t ${CODECOV_TOKEN}'
|
||||
env:
|
||||
CODECOV_TOKEN: $(CODECOV_TOKEN)
|
||||
- job: benchmark
|
||||
displayName: "benchmark"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.16"
|
||||
inputs:
|
||||
version: "1.16"
|
||||
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
filePath: './benchmark.sh'
|
||||
arguments: "master $(Build.Repository.Uri)"
|
||||
|
||||
- job: go_unit_tests
|
||||
displayName: "unit tests"
|
||||
strategy:
|
||||
matrix:
|
||||
linux 1.16:
|
||||
goVersion: '1.16'
|
||||
imageName: 'ubuntu-latest'
|
||||
mac 1.16:
|
||||
goVersion: '1.16'
|
||||
imageName: 'macOS-latest'
|
||||
windows 1.16:
|
||||
goVersion: '1.16'
|
||||
imageName: 'windows-latest'
|
||||
linux 1.15:
|
||||
goVersion: '1.15'
|
||||
imageName: 'ubuntu-latest'
|
||||
mac 1.15:
|
||||
goVersion: '1.15'
|
||||
imageName: 'macOS-latest'
|
||||
windows 1.15:
|
||||
goVersion: '1.15'
|
||||
imageName: 'windows-latest'
|
||||
pool:
|
||||
vmImage: $(imageName)
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go $(goVersion)"
|
||||
inputs:
|
||||
version: $(goVersion)
|
||||
- task: Go@0
|
||||
displayName: "go test ./..."
|
||||
inputs:
|
||||
command: 'test'
|
||||
arguments: './...'
|
||||
- stage: build_binaries
|
||||
displayName: "Build binaries"
|
||||
dependsOn: run_checks
|
||||
jobs:
|
||||
- job: build_binary
|
||||
displayName: "Build binary"
|
||||
strategy:
|
||||
matrix:
|
||||
linux_amd64:
|
||||
GOOS: linux
|
||||
GOARCH: amd64
|
||||
darwin_amd64:
|
||||
GOOS: darwin
|
||||
GOARCH: amd64
|
||||
windows_amd64:
|
||||
GOOS: windows
|
||||
GOARCH: amd64
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go"
|
||||
inputs:
|
||||
version: 1.16
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: "make dist"
|
||||
env:
|
||||
go.goos: $(GOOS)
|
||||
go.goarch: $(GOARCH)
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
sourceFolder: '$(Build.SourcesDirectory)'
|
||||
contents: '*.tar.xz'
|
||||
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: binaries
|
||||
- stage: build_binaries_manifest
|
||||
displayName: "Build binaries manifest"
|
||||
dependsOn: build_binaries
|
||||
jobs:
|
||||
- job: build_manifest
|
||||
displayName: "Build binaries manifest"
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
inputs:
|
||||
buildType: 'current'
|
||||
downloadType: 'single'
|
||||
artifactName: 'binaries'
|
||||
downloadPath: '$(Build.SourcesDirectory)'
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: "cd binaries && sha256sum --binary *.tar.xz | tee $(Build.ArtifactStagingDirectory)/sha256sums.txt"
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: manifest
|
||||
|
||||
- stage: build_docker_image
|
||||
displayName: "Build Docker image"
|
||||
dependsOn: run_checks
|
||||
jobs:
|
||||
- job: build
|
||||
displayName: "Build"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: 'build'
|
||||
Dockerfile: 'Dockerfile'
|
||||
buildContext: '.'
|
||||
addPipelineData: false
|
||||
|
||||
- stage: publish_docker_image
|
||||
displayName: "Publish Docker image"
|
||||
dependsOn: build_docker_image
|
||||
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||
jobs:
|
||||
- job: publish
|
||||
displayName: "Publish"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
containerRegistry: 'DockerHub'
|
||||
repository: 'pelletier/go-toml'
|
||||
command: 'buildAndPush'
|
||||
Dockerfile: 'Dockerfile'
|
||||
buildContext: '.'
|
||||
tags: 'latest'
|
||||
|
|
@ -1,35 +0,0 @@
#!/bin/bash

set -ex

reference_ref=${1:-master}
reference_git=${2:-.}

if ! `hash benchstat 2>/dev/null`; then
    echo "Installing benchstat"
    go get golang.org/x/perf/cmd/benchstat
fi

tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
ref_tempdir="${tempdir}/ref"
ref_benchmark="${ref_tempdir}/benchmark-`echo -n ${reference_ref}|tr -s '/' '-'`.txt"
local_benchmark="`pwd`/benchmark-local.txt"

echo "=== ${reference_ref} (${ref_tempdir})"
git clone ${reference_git} ${ref_tempdir} >/dev/null 2>/dev/null
pushd ${ref_tempdir} >/dev/null
git checkout ${reference_ref} >/dev/null 2>/dev/null
go test -bench=. -benchmem | tee ${ref_benchmark}
cd benchmark
go test -bench=. -benchmem | tee -a ${ref_benchmark}
popd >/dev/null

echo ""
echo "=== local"
go test -bench=. -benchmem | tee ${local_benchmark}
cd benchmark
go test -bench=. -benchmem | tee -a ${local_benchmark}

echo ""
echo "=== diff"
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
@ -1,23 +0,0 @@
// Package toml is a TOML parser and manipulation library.
//
// This version supports the specification as described in
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.5.0.md
//
// Marshaling
//
// Go-toml can marshal and unmarshal TOML documents from and to data
// structures.
//
// TOML document as a tree
//
// Go-toml can operate on a TOML document as a tree. Use one of the Load*
// functions to parse TOML data and obtain a Tree instance, then one of its
// methods to manipulate the tree.
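//
// For example, a minimal illustrative sketch (not part of the original
// package documentation; the document and key names are made up):
//
//	tree, err := toml.Load("[server]\nhost = \"example.com\"")
//	if err != nil {
//		panic(err)
//	}
//	// Get walks the tree using a dot-separated key.
//	host := tree.Get("server.host").(string)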
//
// JSONPath-like queries
//
// The package github.com/pelletier/go-toml/query implements a system
// similar to JSONPath to quickly retrieve elements of a TOML document using a
// single expression. See the package documentation for more information.
//
package toml
@ -1,30 +0,0 @@
# This is a TOML document. Boom.

title = "TOML Example"

[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?

[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true

[servers]

# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"

[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"

[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
@ -1,30 +0,0 @@
# This is a TOML document. Boom.

title = "TOML Example"

[owner]
name = "Tom Preston-Werner"
organization = "GitHub"
bio = "GitHub Cofounder & CEO\nLikes tater tots and beer."
dob = 1979-05-27T07:32:00Z # First class dates? Why not?

[database]
server = "192.168.1.1"
ports = [ 8001, 8001, 8002 ]
connection_max = 5000
enabled = true

[servers]

# You can indent as you please. Tabs or spaces. TOML don't care.
[servers.alpha]
ip = "10.0.0.1"
dc = "eqdc10"

[servers.beta]
ip = "10.0.0.2"
dc = "eqdc10"

[clients]
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
@ -1,31 +0,0 @@
// +build gofuzz

package toml

func Fuzz(data []byte) int {
	tree, err := LoadBytes(data)
	if err != nil {
		if tree != nil {
			panic("tree must be nil if there is an error")
		}
		return 0
	}

	str, err := tree.ToTomlString()
	if err != nil {
		if str != "" {
			panic(`str must be "" if there is an error`)
		}
		panic(err)
	}

	tree, err = Load(str)
	if err != nil {
		if tree != nil {
			panic("tree must be nil if there is an error")
		}
		return 0
	}

	return 1
}
@ -1,15 +0,0 @@
#! /bin/sh
set -eu

go get github.com/dvyukov/go-fuzz/go-fuzz
go get github.com/dvyukov/go-fuzz/go-fuzz-build

if [ ! -e toml-fuzz.zip ]; then
    go-fuzz-build github.com/pelletier/go-toml
fi

rm -fr fuzz
mkdir -p fuzz/corpus
cp *.toml fuzz/corpus

go-fuzz -bin=toml-fuzz.zip -workdir=fuzz
@ -1,112 +0,0 @@
|
|||
// Parsing keys handling both bare and quoted keys.
|
||||
|
||||
package toml
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
)
|
||||
|
||||
// Convert the bare key group string to an array.
|
||||
// The input supports double quotation and single quotation,
|
||||
// but escape sequences are not supported. Lexers must unescape them beforehand.
|
||||
func parseKey(key string) ([]string, error) {
|
||||
runes := []rune(key)
|
||||
var groups []string
|
||||
|
||||
if len(key) == 0 {
|
||||
return nil, errors.New("empty key")
|
||||
}
|
||||
|
||||
idx := 0
|
||||
for idx < len(runes) {
|
||||
for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
|
||||
// skip leading whitespace
|
||||
}
|
||||
if idx >= len(runes) {
|
||||
break
|
||||
}
|
||||
r := runes[idx]
|
||||
if isValidBareChar(r) {
|
||||
// parse bare key
|
||||
startIdx := idx
|
||||
endIdx := -1
|
||||
idx++
|
||||
for idx < len(runes) {
|
||||
r = runes[idx]
|
||||
if isValidBareChar(r) {
|
||||
idx++
|
||||
} else if r == '.' {
|
||||
endIdx = idx
|
||||
break
|
||||
} else if isSpace(r) {
|
||||
endIdx = idx
|
||||
for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
|
||||
// skip trailing whitespace
|
||||
}
|
||||
if idx < len(runes) && runes[idx] != '.' {
|
||||
return nil, fmt.Errorf("invalid key character after whitespace: %c", runes[idx])
|
||||
}
|
||||
break
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid bare key character: %c", r)
|
||||
}
|
||||
}
|
||||
if endIdx == -1 {
|
||||
endIdx = idx
|
||||
}
|
||||
groups = append(groups, string(runes[startIdx:endIdx]))
|
||||
} else if r == '\'' {
|
||||
// parse single quoted key
|
||||
idx++
|
||||
startIdx := idx
|
||||
for {
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unclosed single-quoted key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if r == '\'' {
|
||||
groups = append(groups, string(runes[startIdx:idx]))
|
||||
idx++
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
} else if r == '"' {
|
||||
// parse double quoted key
|
||||
idx++
|
||||
startIdx := idx
|
||||
for {
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unclosed double-quoted key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if r == '"' {
|
||||
groups = append(groups, string(runes[startIdx:idx]))
|
||||
idx++
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
} else if r == '.' {
|
||||
idx++
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unexpected end of key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if !isValidBareChar(r) && r != '\'' && r != '"' && r != ' ' {
|
||||
return nil, fmt.Errorf("expecting key part after dot")
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid key character: %c", r)
|
||||
}
|
||||
}
|
||||
if len(groups) == 0 {
|
||||
return nil, fmt.Errorf("empty key")
|
||||
}
|
||||
return groups, nil
|
||||
}
|
||||
|
||||
func isValidBareChar(r rune) bool {
|
||||
return isAlphanumeric(r) || r == '-' || isDigit(r)
|
||||
}
|
||||
File diff suppressed because it is too large
@ -1,287 +0,0 @@
|
|||
// Implementation of TOML's local date/time.
|
||||
//
|
||||
// Copied over from Google's civil to avoid pulling all the Google dependencies.
|
||||
// Originals:
|
||||
// https://raw.githubusercontent.com/googleapis/google-cloud-go/ed46f5086358513cf8c25f8e3f022cb838a49d66/civil/civil.go
|
||||
// Changes:
|
||||
// * Renamed files from civil* to localtime*.
|
||||
// * Package changed from civil to toml.
|
||||
// * 'Local' prefix added to all structs.
|
||||
//
|
||||
// Copyright 2016 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package civil implements types for civil time, a time-zone-independent
|
||||
// representation of time that follows the rules of the proleptic
|
||||
// Gregorian calendar with exactly 24-hour days, 60-minute hours, and 60-second
|
||||
// minutes.
|
||||
//
|
||||
// Because they lack location information, these types do not represent unique
|
||||
// moments or intervals of time. Use time.Time for that purpose.
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
// A LocalDate represents a date (year, month, day).
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique 24-hour timespan.
|
||||
type LocalDate struct {
|
||||
Year int // Year (e.g., 2014).
|
||||
Month time.Month // Month of the year (January = 1, ...).
|
||||
Day int // Day of the month, starting at 1.
|
||||
}
|
||||
|
||||
// LocalDateOf returns the LocalDate in which a time occurs in that time's location.
|
||||
func LocalDateOf(t time.Time) LocalDate {
|
||||
var d LocalDate
|
||||
d.Year, d.Month, d.Day = t.Date()
|
||||
return d
|
||||
}
|
||||
|
||||
// ParseLocalDate parses a string in RFC3339 full-date format and returns the date value it represents.
|
||||
func ParseLocalDate(s string) (LocalDate, error) {
|
||||
t, err := time.Parse("2006-01-02", s)
|
||||
if err != nil {
|
||||
return LocalDate{}, err
|
||||
}
|
||||
return LocalDateOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in RFC3339 full-date format.
|
||||
func (d LocalDate) String() string {
|
||||
return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
|
||||
}
|
||||
|
||||
// IsValid reports whether the date is valid.
|
||||
func (d LocalDate) IsValid() bool {
|
||||
return LocalDateOf(d.In(time.UTC)) == d
|
||||
}
|
||||
|
||||
// In returns the time corresponding to time 00:00:00 of the date in the location.
|
||||
//
|
||||
// In is always consistent with time.LocalDate, even when time.LocalDate returns a time
|
||||
// on a different day. For example, if loc is America/Indiana/Vincennes, then both
|
||||
// time.LocalDate(1955, time.May, 1, 0, 0, 0, 0, loc)
|
||||
// and
|
||||
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}.In(loc)
|
||||
// return 23:00:00 on April 30, 1955.
|
||||
//
|
||||
// In panics if loc is nil.
|
||||
func (d LocalDate) In(loc *time.Location) time.Time {
|
||||
return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, loc)
|
||||
}
|
||||
|
||||
// AddDays returns the date that is n days in the future.
|
||||
// n can also be negative to go into the past.
|
||||
func (d LocalDate) AddDays(n int) LocalDate {
|
||||
return LocalDateOf(d.In(time.UTC).AddDate(0, 0, n))
|
||||
}
|
||||
|
||||
// DaysSince returns the signed number of days between the date and s, not including the end day.
|
||||
// This is the inverse operation to AddDays.
|
||||
func (d LocalDate) DaysSince(s LocalDate) (days int) {
|
||||
// We convert to Unix time so we do not have to worry about leap seconds:
|
||||
// Unix time increases by exactly 86400 seconds per day.
|
||||
deltaUnix := d.In(time.UTC).Unix() - s.In(time.UTC).Unix()
|
||||
return int(deltaUnix / 86400)
|
||||
}
|
||||
|
||||
// Before reports whether d1 occurs before d2.
|
||||
func (d1 LocalDate) Before(d2 LocalDate) bool {
|
||||
if d1.Year != d2.Year {
|
||||
return d1.Year < d2.Year
|
||||
}
|
||||
if d1.Month != d2.Month {
|
||||
return d1.Month < d2.Month
|
||||
}
|
||||
return d1.Day < d2.Day
|
||||
}
|
||||
|
||||
// After reports whether d1 occurs after d2.
|
||||
func (d1 LocalDate) After(d2 LocalDate) bool {
|
||||
return d2.Before(d1)
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of d.String().
|
||||
func (d LocalDate) MarshalText() ([]byte, error) {
|
||||
return []byte(d.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The date is expected to be a string in a format accepted by ParseLocalDate.
|
||||
func (d *LocalDate) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*d, err = ParseLocalDate(string(data))
|
||||
return err
|
||||
}
|
||||
|
||||
// A LocalTime represents a time with nanosecond precision.
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique moment in time.
|
||||
//
|
||||
// This type exists to represent the TIME type in storage-based APIs like BigQuery.
|
||||
// Most operations on Times are unlikely to be meaningful. Prefer the LocalDateTime type.
|
||||
type LocalTime struct {
|
||||
Hour int // The hour of the day in 24-hour format; range [0-23]
|
||||
Minute int // The minute of the hour; range [0-59]
|
||||
Second int // The second of the minute; range [0-59]
|
||||
Nanosecond int // The nanosecond of the second; range [0-999999999]
|
||||
}
|
||||
|
||||
// LocalTimeOf returns the LocalTime representing the time of day in which a time occurs
|
||||
// in that time's location. It ignores the date.
|
||||
func LocalTimeOf(t time.Time) LocalTime {
|
||||
var tm LocalTime
|
||||
tm.Hour, tm.Minute, tm.Second = t.Clock()
|
||||
tm.Nanosecond = t.Nanosecond()
|
||||
return tm
|
||||
}
|
||||
|
||||
// ParseLocalTime parses a string and returns the time value it represents.
|
||||
// ParseLocalTime accepts an extended form of the RFC3339 partial-time format. After
|
||||
// the HH:MM:SS part of the string, an optional fractional part may appear,
|
||||
// consisting of a decimal point followed by one to nine decimal digits.
|
||||
// (RFC3339 admits only one digit after the decimal point).
|
||||
func ParseLocalTime(s string) (LocalTime, error) {
|
||||
t, err := time.Parse("15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
return LocalTime{}, err
|
||||
}
|
||||
return LocalTimeOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in the format described in ParseLocalTime. If Nanoseconds
|
||||
// is zero, no fractional part will be generated. Otherwise, the result will
|
||||
// end with a fractional part consisting of a decimal point and nine digits.
|
||||
func (t LocalTime) String() string {
|
||||
s := fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
|
||||
if t.Nanosecond == 0 {
|
||||
return s
|
||||
}
|
||||
return s + fmt.Sprintf(".%09d", t.Nanosecond)
|
||||
}
|
||||
|
||||
// IsValid reports whether the time is valid.
|
||||
func (t LocalTime) IsValid() bool {
|
||||
// Construct a non-zero time.
|
||||
tm := time.Date(2, 2, 2, t.Hour, t.Minute, t.Second, t.Nanosecond, time.UTC)
|
||||
return LocalTimeOf(tm) == t
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of t.String().
|
||||
func (t LocalTime) MarshalText() ([]byte, error) {
|
||||
return []byte(t.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The time is expected to be a string in a format accepted by ParseLocalTime.
|
||||
func (t *LocalTime) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*t, err = ParseLocalTime(string(data))
|
||||
return err
|
||||
}
|
||||
|
||||
// A LocalDateTime represents a date and time.
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique moment in time.
|
||||
type LocalDateTime struct {
|
||||
Date LocalDate
|
||||
Time LocalTime
|
||||
}
|
||||
|
||||
// Note: We deliberately do not embed LocalDate into LocalDateTime, to avoid promoting AddDays and Sub.
|
||||
|
||||
// LocalDateTimeOf returns the LocalDateTime in which a time occurs in that time's location.
|
||||
func LocalDateTimeOf(t time.Time) LocalDateTime {
|
||||
return LocalDateTime{
|
||||
Date: LocalDateOf(t),
|
||||
Time: LocalTimeOf(t),
|
||||
}
|
||||
}
|
||||
|
||||
// ParseLocalDateTime parses a string and returns the LocalDateTime it represents.
|
||||
// ParseLocalDateTime accepts a variant of the RFC3339 date-time format that omits
|
||||
// the time offset but includes an optional fractional time, as described in
|
||||
// ParseLocalTime. Informally, the accepted format is
|
||||
// YYYY-MM-DDTHH:MM:SS[.FFFFFFFFF]
|
||||
// where the 'T' may be a lower-case 't'.
|
||||
func ParseLocalDateTime(s string) (LocalDateTime, error) {
|
||||
t, err := time.Parse("2006-01-02T15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
t, err = time.Parse("2006-01-02t15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
return LocalDateTime{}, err
|
||||
}
|
||||
}
|
||||
return LocalDateTimeOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in the format described in ParseLocalDate.
|
||||
func (dt LocalDateTime) String() string {
|
||||
return dt.Date.String() + "T" + dt.Time.String()
|
||||
}
|
||||
|
||||
// IsValid reports whether the datetime is valid.
|
||||
func (dt LocalDateTime) IsValid() bool {
|
||||
return dt.Date.IsValid() && dt.Time.IsValid()
|
||||
}
|
||||
|
||||
// In returns the time corresponding to the LocalDateTime in the given location.
|
||||
//
|
||||
// If the time is missing or ambigous at the location, In returns the same
|
||||
// result as time.LocalDate. For example, if loc is America/Indiana/Vincennes, then
|
||||
// both
|
||||
// time.LocalDate(1955, time.May, 1, 0, 30, 0, 0, loc)
|
||||
// and
|
||||
// civil.LocalDateTime{
|
||||
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}},
|
||||
// civil.LocalTime{Minute: 30}}.In(loc)
|
||||
// return 23:30:00 on April 30, 1955.
|
||||
//
|
||||
// In panics if loc is nil.
|
||||
func (dt LocalDateTime) In(loc *time.Location) time.Time {
|
||||
return time.Date(dt.Date.Year, dt.Date.Month, dt.Date.Day, dt.Time.Hour, dt.Time.Minute, dt.Time.Second, dt.Time.Nanosecond, loc)
|
||||
}
|
||||
|
||||
// Before reports whether dt1 occurs before dt2.
|
||||
func (dt1 LocalDateTime) Before(dt2 LocalDateTime) bool {
|
||||
return dt1.In(time.UTC).Before(dt2.In(time.UTC))
|
||||
}
|
||||
|
||||
// After reports whether dt1 occurs after dt2.
|
||||
func (dt1 LocalDateTime) After(dt2 LocalDateTime) bool {
|
||||
return dt2.Before(dt1)
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of dt.String().
|
||||
func (dt LocalDateTime) MarshalText() ([]byte, error) {
|
||||
return []byte(dt.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The datetime is expected to be a string in a format accepted by ParseLocalDateTime
|
||||
func (dt *LocalDateTime) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*dt, err = ParseLocalDateTime(string(data))
|
||||
return err
|
||||
}
|
||||
File diff suppressed because it is too large
@ -1,39 +0,0 @@
|
|||
title = "TOML Marshal Testing"
|
||||
|
||||
[basic_lists]
|
||||
floats = [12.3,45.6,78.9]
|
||||
bools = [true,false,true]
|
||||
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||
ints = [8001,8001,8002]
|
||||
uints = [5002,5003]
|
||||
strings = ["One","Two","Three"]
|
||||
|
||||
[[subdocptrs]]
|
||||
name = "Second"
|
||||
|
||||
[basic_map]
|
||||
one = "one"
|
||||
two = "two"
|
||||
|
||||
[subdoc]
|
||||
|
||||
[subdoc.second]
|
||||
name = "Second"
|
||||
|
||||
[subdoc.first]
|
||||
name = "First"
|
||||
|
||||
[basic]
|
||||
uint = 5001
|
||||
bool = true
|
||||
float = 123.4
|
||||
float64 = 123.456782132399
|
||||
int = 5000
|
||||
string = "Bite me"
|
||||
date = 1979-05-27T07:32:00Z
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.First"
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.Second"
|
||||
|
|
@ -1,39 +0,0 @@
|
|||
title = "TOML Marshal Testing"
|
||||
|
||||
[basic]
|
||||
bool = true
|
||||
date = 1979-05-27T07:32:00Z
|
||||
float = 123.4
|
||||
float64 = 123.456782132399
|
||||
int = 5000
|
||||
string = "Bite me"
|
||||
uint = 5001
|
||||
|
||||
[basic_lists]
|
||||
bools = [true,false,true]
|
||||
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||
floats = [12.3,45.6,78.9]
|
||||
ints = [8001,8001,8002]
|
||||
strings = ["One","Two","Three"]
|
||||
uints = [5002,5003]
|
||||
|
||||
[basic_map]
|
||||
one = "one"
|
||||
two = "two"
|
||||
|
||||
[subdoc]
|
||||
|
||||
[subdoc.first]
|
||||
name = "First"
|
||||
|
||||
[subdoc.second]
|
||||
name = "Second"
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.First"
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.Second"
|
||||
|
||||
[[subdocptrs]]
|
||||
name = "Second"
|
||||
|
|
@ -1,507 +0,0 @@
|
|||
// TOML Parser.
|
||||
|
||||
package toml
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"math"
|
||||
"reflect"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type tomlParser struct {
|
||||
flowIdx int
|
||||
flow []token
|
||||
tree *Tree
|
||||
currentTable []string
|
||||
seenTableKeys []string
|
||||
}
|
||||
|
||||
type tomlParserStateFn func() tomlParserStateFn
|
||||
|
||||
// Formats and panics an error message based on a token
|
||||
func (p *tomlParser) raiseError(tok *token, msg string, args ...interface{}) {
|
||||
panic(tok.Position.String() + ": " + fmt.Sprintf(msg, args...))
|
||||
}
|
||||
|
||||
func (p *tomlParser) run() {
|
||||
for state := p.parseStart; state != nil; {
|
||||
state = state()
|
||||
}
|
||||
}
|
||||
|
||||
func (p *tomlParser) peek() *token {
|
||||
if p.flowIdx >= len(p.flow) {
|
||||
return nil
|
||||
}
|
||||
return &p.flow[p.flowIdx]
|
||||
}
|
||||
|
||||
func (p *tomlParser) assume(typ tokenType) {
|
||||
tok := p.getToken()
|
||||
if tok == nil {
|
||||
p.raiseError(tok, "was expecting token %s, but token stream is empty", tok)
|
||||
}
|
||||
if tok.typ != typ {
|
||||
p.raiseError(tok, "was expecting token %s, but got %s instead", typ, tok)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *tomlParser) getToken() *token {
|
||||
tok := p.peek()
|
||||
if tok == nil {
|
||||
return nil
|
||||
}
|
||||
p.flowIdx++
|
||||
return tok
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseStart() tomlParserStateFn {
|
||||
tok := p.peek()
|
||||
|
||||
// end of stream, parsing is finished
|
||||
if tok == nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
switch tok.typ {
|
||||
case tokenDoubleLeftBracket:
|
||||
return p.parseGroupArray
|
||||
case tokenLeftBracket:
|
||||
return p.parseGroup
|
||||
case tokenKey:
|
||||
return p.parseAssign
|
||||
case tokenEOF:
|
||||
return nil
|
||||
case tokenError:
|
||||
p.raiseError(tok, "parsing error: %s", tok.String())
|
||||
default:
|
||||
p.raiseError(tok, "unexpected token %s", tok.typ)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseGroupArray() tomlParserStateFn {
|
||||
startToken := p.getToken() // discard the [[
|
||||
key := p.getToken()
|
||||
if key.typ != tokenKeyGroupArray {
|
||||
p.raiseError(key, "unexpected token %s, was expecting a table array key", key)
|
||||
}
|
||||
|
||||
// get or create table array element at the indicated part in the path
|
||||
keys, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid table array key: %s", err)
|
||||
}
|
||||
p.tree.createSubTree(keys[:len(keys)-1], startToken.Position) // create parent entries
|
||||
destTree := p.tree.GetPath(keys)
|
||||
var array []*Tree
|
||||
if destTree == nil {
|
||||
array = make([]*Tree, 0)
|
||||
} else if target, ok := destTree.([]*Tree); ok && target != nil {
|
||||
array = destTree.([]*Tree)
|
||||
} else {
|
||||
p.raiseError(key, "key %s is already assigned and not of type table array", key)
|
||||
}
|
||||
p.currentTable = keys
|
||||
|
||||
// add a new tree to the end of the table array
|
||||
newTree := newTree()
|
||||
newTree.position = startToken.Position
|
||||
array = append(array, newTree)
|
||||
p.tree.SetPath(p.currentTable, array)
|
||||
|
||||
// remove all keys that were children of this table array
|
||||
prefix := key.val + "."
|
||||
found := false
|
||||
for ii := 0; ii < len(p.seenTableKeys); {
|
||||
tableKey := p.seenTableKeys[ii]
|
||||
if strings.HasPrefix(tableKey, prefix) {
|
||||
p.seenTableKeys = append(p.seenTableKeys[:ii], p.seenTableKeys[ii+1:]...)
|
||||
} else {
|
||||
found = (tableKey == key.val)
|
||||
ii++
|
||||
}
|
||||
}
|
||||
|
||||
// keep this key name from use by other kinds of assignments
|
||||
if !found {
|
||||
p.seenTableKeys = append(p.seenTableKeys, key.val)
|
||||
}
|
||||
|
||||
// move to next parser state
|
||||
p.assume(tokenDoubleRightBracket)
|
||||
return p.parseStart
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseGroup() tomlParserStateFn {
|
||||
startToken := p.getToken() // discard the [
|
||||
key := p.getToken()
|
||||
if key.typ != tokenKeyGroup {
|
||||
p.raiseError(key, "unexpected token %s, was expecting a table key", key)
|
||||
}
|
||||
for _, item := range p.seenTableKeys {
|
||||
if item == key.val {
|
||||
p.raiseError(key, "duplicated tables")
|
||||
}
|
||||
}
|
||||
|
||||
p.seenTableKeys = append(p.seenTableKeys, key.val)
|
||||
keys, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid table array key: %s", err)
|
||||
}
|
||||
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
||||
p.raiseError(key, "%s", err)
|
||||
}
|
||||
destTree := p.tree.GetPath(keys)
|
||||
if target, ok := destTree.(*Tree); ok && target != nil && target.inline {
|
||||
p.raiseError(key, "could not re-define exist inline table or its sub-table : %s",
|
||||
strings.Join(keys, "."))
|
||||
}
|
||||
p.assume(tokenRightBracket)
|
||||
p.currentTable = keys
|
||||
return p.parseStart
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseAssign() tomlParserStateFn {
|
||||
key := p.getToken()
|
||||
p.assume(tokenEqual)
|
||||
|
||||
parsedKey, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid key: %s", err.Error())
|
||||
}
|
||||
|
||||
value := p.parseRvalue()
|
||||
var tableKey []string
|
||||
if len(p.currentTable) > 0 {
|
||||
tableKey = p.currentTable
|
||||
} else {
|
||||
tableKey = []string{}
|
||||
}
|
||||
|
||||
prefixKey := parsedKey[0 : len(parsedKey)-1]
|
||||
tableKey = append(tableKey, prefixKey...)
|
||||
|
||||
// find the table to assign, looking out for arrays of tables
|
||||
var targetNode *Tree
|
||||
switch node := p.tree.GetPath(tableKey).(type) {
|
||||
case []*Tree:
|
||||
targetNode = node[len(node)-1]
|
||||
case *Tree:
|
||||
targetNode = node
|
||||
case nil:
|
||||
// create intermediate
|
||||
if err := p.tree.createSubTree(tableKey, key.Position); err != nil {
|
||||
p.raiseError(key, "could not create intermediate group: %s", err)
|
||||
}
|
||||
targetNode = p.tree.GetPath(tableKey).(*Tree)
|
||||
default:
|
||||
p.raiseError(key, "Unknown table type for path: %s",
|
||||
strings.Join(tableKey, "."))
|
||||
}
|
||||
|
||||
if targetNode.inline {
|
||||
p.raiseError(key, "could not add key or sub-table to exist inline table or its sub-table : %s",
|
||||
strings.Join(tableKey, "."))
|
||||
}
|
||||
|
||||
// assign value to the found table
|
||||
keyVal := parsedKey[len(parsedKey)-1]
|
||||
localKey := []string{keyVal}
|
||||
finalKey := append(tableKey, keyVal)
|
||||
if targetNode.GetPath(localKey) != nil {
|
||||
p.raiseError(key, "The following key was defined twice: %s",
|
||||
strings.Join(finalKey, "."))
|
||||
}
|
||||
var toInsert interface{}
|
||||
|
||||
switch value.(type) {
|
||||
case *Tree, []*Tree:
|
||||
toInsert = value
|
||||
default:
|
||||
toInsert = &tomlValue{value: value, position: key.Position}
|
||||
}
|
||||
targetNode.values[keyVal] = toInsert
|
||||
return p.parseStart
|
||||
}
|
||||
|
||||
var errInvalidUnderscore = errors.New("invalid use of _ in number")
|
||||
|
||||
func numberContainsInvalidUnderscore(value string) error {
|
||||
// For large numbers, you may use underscores between digits to enhance
|
||||
// readability. Each underscore must be surrounded by at least one digit on
|
||||
// each side.
|
||||
|
||||
hasBefore := false
|
||||
for idx, r := range value {
|
||||
if r == '_' {
|
||||
if !hasBefore || idx+1 >= len(value) {
|
||||
// can't end with an underscore
|
||||
return errInvalidUnderscore
|
||||
}
|
||||
}
|
||||
hasBefore = isDigit(r)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
var errInvalidUnderscoreHex = errors.New("invalid use of _ in hex number")
|
||||
|
||||
func hexNumberContainsInvalidUnderscore(value string) error {
|
||||
hasBefore := false
|
||||
for idx, r := range value {
|
||||
if r == '_' {
|
||||
if !hasBefore || idx+1 >= len(value) {
|
||||
// can't end with an underscore
|
||||
return errInvalidUnderscoreHex
|
||||
}
|
||||
}
|
||||
hasBefore = isHexDigit(r)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func cleanupNumberToken(value string) string {
|
||||
cleanedVal := strings.Replace(value, "_", "", -1)
|
||||
return cleanedVal
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseRvalue() interface{} {
|
||||
tok := p.getToken()
|
||||
if tok == nil || tok.typ == tokenEOF {
|
||||
p.raiseError(tok, "expecting a value")
|
||||
}
|
||||
|
||||
switch tok.typ {
|
||||
case tokenString:
|
||||
return tok.val
|
||||
case tokenTrue:
|
||||
return true
|
||||
case tokenFalse:
|
||||
return false
|
||||
case tokenInf:
|
||||
if tok.val[0] == '-' {
|
||||
return math.Inf(-1)
|
||||
}
|
||||
return math.Inf(1)
|
||||
case tokenNan:
|
||||
return math.NaN()
|
||||
case tokenInteger:
|
||||
cleanedVal := cleanupNumberToken(tok.val)
|
||||
base := 10
|
||||
s := cleanedVal
|
||||
checkInvalidUnderscore := numberContainsInvalidUnderscore
|
||||
if len(cleanedVal) >= 3 && cleanedVal[0] == '0' {
|
||||
switch cleanedVal[1] {
|
||||
case 'x':
|
||||
checkInvalidUnderscore = hexNumberContainsInvalidUnderscore
|
||||
base = 16
|
||||
case 'o':
|
||||
base = 8
|
||||
case 'b':
|
||||
base = 2
|
||||
default:
|
||||
panic("invalid base") // the lexer should catch this first
|
||||
}
|
||||
s = cleanedVal[2:]
|
||||
}
|
||||
|
||||
err := checkInvalidUnderscore(tok.val)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
|
||||
var val interface{}
|
||||
val, err = strconv.ParseInt(s, base, 64)
|
||||
if err == nil {
|
||||
return val
|
||||
}
|
||||
|
||||
if s[0] != '-' {
|
||||
if val, err = strconv.ParseUint(s, base, 64); err == nil {
|
||||
return val
|
||||
}
|
||||
}
|
||||
p.raiseError(tok, "%s", err)
|
||||
case tokenFloat:
|
||||
err := numberContainsInvalidUnderscore(tok.val)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
cleanedVal := cleanupNumberToken(tok.val)
|
||||
val, err := strconv.ParseFloat(cleanedVal, 64)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
case tokenLocalTime:
|
||||
val, err := ParseLocalTime(tok.val)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
case tokenLocalDate:
|
||||
// a local date may be followed by:
|
||||
// * nothing: this is a local date
|
||||
// * a local time: this is a local date-time
|
||||
|
||||
next := p.peek()
|
||||
if next == nil || next.typ != tokenLocalTime {
|
||||
val, err := ParseLocalDate(tok.val)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
localDate := tok
|
||||
localTime := p.getToken()
|
||||
|
||||
next = p.peek()
|
||||
if next == nil || next.typ != tokenTimeOffset {
|
||||
v := localDate.val + "T" + localTime.val
|
||||
val, err := ParseLocalDateTime(v)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
offset := p.getToken()
|
||||
|
||||
layout := time.RFC3339Nano
|
||||
v := localDate.val + "T" + localTime.val + offset.val
|
||||
val, err := time.ParseInLocation(layout, v, time.UTC)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
case tokenLeftBracket:
|
||||
return p.parseArray()
|
||||
case tokenLeftCurlyBrace:
|
||||
return p.parseInlineTable()
|
||||
case tokenEqual:
|
||||
p.raiseError(tok, "cannot have multiple equals for the same key")
|
||||
case tokenError:
|
||||
p.raiseError(tok, "%s", tok)
|
||||
default:
|
||||
panic(fmt.Errorf("unhandled token: %v", tok))
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func tokenIsComma(t *token) bool {
|
||||
return t != nil && t.typ == tokenComma
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseInlineTable() *Tree {
|
||||
tree := newTree()
|
||||
var previous *token
|
||||
Loop:
|
||||
for {
|
||||
follow := p.peek()
|
||||
if follow == nil || follow.typ == tokenEOF {
|
||||
p.raiseError(follow, "unterminated inline table")
|
||||
}
|
||||
switch follow.typ {
|
||||
case tokenRightCurlyBrace:
|
||||
p.getToken()
|
||||
break Loop
|
||||
case tokenKey, tokenInteger, tokenString:
|
||||
if !tokenIsComma(previous) && previous != nil {
|
||||
p.raiseError(follow, "comma expected between fields in inline table")
|
||||
}
|
||||
key := p.getToken()
|
||||
p.assume(tokenEqual)
|
||||
|
||||
parsedKey, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid key: %s", err)
|
||||
}
|
||||
|
||||
value := p.parseRvalue()
|
||||
tree.SetPath(parsedKey, value)
|
||||
case tokenComma:
|
||||
if tokenIsComma(previous) {
|
||||
p.raiseError(follow, "need field between two commas in inline table")
|
||||
}
|
||||
p.getToken()
|
||||
default:
|
||||
p.raiseError(follow, "unexpected token type in inline table: %s", follow.String())
|
||||
}
|
||||
previous = follow
|
||||
}
|
||||
if tokenIsComma(previous) {
|
||||
p.raiseError(previous, "trailing comma at the end of inline table")
|
||||
}
|
||||
tree.inline = true
|
||||
return tree
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseArray() interface{} {
|
||||
var array []interface{}
|
||||
arrayType := reflect.TypeOf(newTree())
|
||||
for {
|
||||
follow := p.peek()
|
||||
if follow == nil || follow.typ == tokenEOF {
|
||||
p.raiseError(follow, "unterminated array")
|
||||
}
|
||||
if follow.typ == tokenRightBracket {
|
||||
p.getToken()
|
||||
break
|
||||
}
|
||||
val := p.parseRvalue()
|
||||
if reflect.TypeOf(val) != arrayType {
|
||||
arrayType = nil
|
||||
}
|
||||
array = append(array, val)
|
||||
follow = p.peek()
|
||||
if follow == nil || follow.typ == tokenEOF {
|
||||
p.raiseError(follow, "unterminated array")
|
||||
}
|
||||
if follow.typ != tokenRightBracket && follow.typ != tokenComma {
|
||||
p.raiseError(follow, "missing comma")
|
||||
}
|
||||
if follow.typ == tokenComma {
|
||||
p.getToken()
|
||||
}
|
||||
}
|
||||
|
||||
// if the array is a mixed-type array or its length is 0,
|
||||
// don't convert it to a table array
|
||||
if len(array) <= 0 {
|
||||
arrayType = nil
|
||||
}
|
||||
// An array of Trees is actually an array of inline
|
||||
// tables, which is a shorthand for a table array. If the
|
||||
// array was not converted from []interface{} to []*Tree,
|
||||
// the two notations would not be equivalent.
|
||||
if arrayType == reflect.TypeOf(newTree()) {
|
||||
tomlArray := make([]*Tree, len(array))
|
||||
for i, v := range array {
|
||||
tomlArray[i] = v.(*Tree)
|
||||
}
|
||||
return tomlArray
|
||||
}
|
||||
return array
|
||||
}
|
||||
|
||||
func parseToml(flow []token) *Tree {
|
||||
result := newTree()
|
||||
result.position = Position{1, 1}
|
||||
parser := &tomlParser{
|
||||
flowIdx: 0,
|
||||
flow: flow,
|
||||
tree: result,
|
||||
currentTable: make([]string, 0),
|
||||
seenTableKeys: make([]string, 0),
|
||||
}
|
||||
parser.run()
|
||||
return result
|
||||
}
|
||||
|
|
@ -1,29 +0,0 @@
// Position support for go-toml

package toml

import (
	"fmt"
)

// Position of a document element within a TOML document.
//
// Line and Col are both 1-indexed positions for the element's line number and
// column number, respectively. Values of zero or less will cause Invalid()
// to return true.
type Position struct {
	Line int // line within the document
	Col  int // column within the line
}

// String representation of the position.
// Displays 1-indexed line and column numbers.
func (p Position) String() string {
	return fmt.Sprintf("(%d, %d)", p.Line, p.Col)
}

// Invalid reports whether the position is invalid, i.e. whether Line or Col
// has a zero or negative value.
func (p Position) Invalid() bool {
	return p.Line <= 0 || p.Col <= 0
}
@ -1,136 +0,0 @@
|
|||
package toml
|
||||
|
||||
import "fmt"
|
||||
|
||||
// Define tokens
|
||||
type tokenType int
|
||||
|
||||
const (
|
||||
eof = -(iota + 1)
|
||||
)
|
||||
|
||||
const (
|
||||
tokenError tokenType = iota
|
||||
tokenEOF
|
||||
tokenComment
|
||||
tokenKey
|
||||
tokenString
|
||||
tokenInteger
|
||||
tokenTrue
|
||||
tokenFalse
|
||||
tokenFloat
|
||||
tokenInf
|
||||
tokenNan
|
||||
tokenEqual
|
||||
tokenLeftBracket
|
||||
tokenRightBracket
|
||||
tokenLeftCurlyBrace
|
||||
tokenRightCurlyBrace
|
||||
tokenLeftParen
|
||||
tokenRightParen
|
||||
tokenDoubleLeftBracket
|
||||
tokenDoubleRightBracket
|
||||
tokenLocalDate
|
||||
tokenLocalTime
|
||||
tokenTimeOffset
|
||||
tokenKeyGroup
|
||||
tokenKeyGroupArray
|
||||
tokenComma
|
||||
tokenColon
|
||||
tokenDollar
|
||||
tokenStar
|
||||
tokenQuestion
|
||||
tokenDot
|
||||
tokenDotDot
|
||||
tokenEOL
|
||||
)
|
||||
|
||||
var tokenTypeNames = []string{
|
||||
"Error",
|
||||
"EOF",
|
||||
"Comment",
|
||||
"Key",
|
||||
"String",
|
||||
"Integer",
|
||||
"True",
|
||||
"False",
|
||||
"Float",
|
||||
"Inf",
|
||||
"NaN",
|
||||
"=",
|
||||
"[",
|
||||
"]",
|
||||
"{",
|
||||
"}",
|
||||
"(",
|
||||
")",
|
||||
"]]",
|
||||
"[[",
|
||||
"LocalDate",
|
||||
"LocalTime",
|
||||
"TimeOffset",
|
||||
"KeyGroup",
|
||||
"KeyGroupArray",
|
||||
",",
|
||||
":",
|
||||
"$",
|
||||
"*",
|
||||
"?",
|
||||
".",
|
||||
"..",
|
||||
"EOL",
|
||||
}
|
||||
|
||||
type token struct {
|
||||
Position
|
||||
typ tokenType
|
||||
val string
|
||||
}
|
||||
|
||||
func (tt tokenType) String() string {
|
||||
idx := int(tt)
|
||||
if idx < len(tokenTypeNames) {
|
||||
return tokenTypeNames[idx]
|
||||
}
|
||||
return "Unknown"
|
||||
}
|
||||
|
||||
func (t token) String() string {
|
||||
switch t.typ {
|
||||
case tokenEOF:
|
||||
return "EOF"
|
||||
case tokenError:
|
||||
return t.val
|
||||
}
|
||||
|
||||
return fmt.Sprintf("%q", t.val)
|
||||
}
|
||||
|
||||
func isSpace(r rune) bool {
|
||||
return r == ' ' || r == '\t'
|
||||
}
|
||||
|
||||
func isAlphanumeric(r rune) bool {
|
||||
return 'a' <= r && r <= 'z' || 'A' <= r && r <= 'Z' || r == '_'
|
||||
}
|
||||
|
||||
func isKeyChar(r rune) bool {
|
||||
// Keys start with the first character that isn't whitespace or [ and end
|
||||
// with the last non-whitespace character before the equals sign. Keys
|
||||
// cannot contain a # character."
|
||||
return !(r == '\r' || r == '\n' || r == eof || r == '=')
|
||||
}
|
||||
|
||||
func isKeyStartChar(r rune) bool {
|
||||
return !(isSpace(r) || r == '\r' || r == '\n' || r == eof || r == '[')
|
||||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return '0' <= r && r <= '9'
|
||||
}
|
||||
|
||||
func isHexDigit(r rune) bool {
|
||||
return isDigit(r) ||
|
||||
(r >= 'a' && r <= 'f') ||
|
||||
(r >= 'A' && r <= 'F')
|
||||
}
|
||||
|
|
@ -1,533 +0,0 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type tomlValue struct {
|
||||
value interface{} // string, int64, uint64, float64, bool, time.Time, [] of any of this list
|
||||
comment string
|
||||
commented bool
|
||||
multiline bool
|
||||
literal bool
|
||||
position Position
|
||||
}
|
||||
|
||||
// Tree is the result of the parsing of a TOML file.
|
||||
type Tree struct {
|
||||
values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
|
||||
comment string
|
||||
commented bool
|
||||
inline bool
|
||||
position Position
|
||||
}
|
||||
|
||||
func newTree() *Tree {
|
||||
return newTreeWithPosition(Position{})
|
||||
}
|
||||
|
||||
func newTreeWithPosition(pos Position) *Tree {
|
||||
return &Tree{
|
||||
values: make(map[string]interface{}),
|
||||
position: pos,
|
||||
}
|
||||
}
|
||||
|
||||
// TreeFromMap initializes a new Tree object using the given map.
|
||||
func TreeFromMap(m map[string]interface{}) (*Tree, error) {
|
||||
result, err := toTree(m)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return result.(*Tree), nil
|
||||
}
|
||||
|
||||
// Position returns the position of the tree.
|
||||
func (t *Tree) Position() Position {
|
||||
return t.position
|
||||
}
|
||||
|
||||
// Has returns a boolean indicating if the given key exists.
|
||||
func (t *Tree) Has(key string) bool {
|
||||
if key == "" {
|
||||
return false
|
||||
}
|
||||
return t.HasPath(strings.Split(key, "."))
|
||||
}
|
||||
|
||||
// HasPath returns true if the given path of keys exists, false otherwise.
|
||||
func (t *Tree) HasPath(keys []string) bool {
|
||||
return t.GetPath(keys) != nil
|
||||
}
|
||||
|
||||
// Keys returns the keys of the toplevel tree (does not recurse).
|
||||
func (t *Tree) Keys() []string {
|
||||
keys := make([]string, len(t.values))
|
||||
i := 0
|
||||
for k := range t.values {
|
||||
keys[i] = k
|
||||
i++
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
// Get the value at key in the Tree.
|
||||
// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings.
|
||||
// If you need to retrieve non-bare keys, use GetPath.
|
||||
// Returns nil if the path does not exist in the tree.
|
||||
// If keys is of length zero, the current tree is returned.
|
||||
func (t *Tree) Get(key string) interface{} {
|
||||
if key == "" {
|
||||
return t
|
||||
}
|
||||
return t.GetPath(strings.Split(key, "."))
|
||||
}
|
||||
|
||||
// GetPath returns the element in the tree indicated by 'keys'.
|
||||
// If keys is of length zero, the current tree is returned.
|
||||
func (t *Tree) GetPath(keys []string) interface{} {
|
||||
if len(keys) == 0 {
|
||||
return t
|
||||
}
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
value, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
return nil
|
||||
}
|
||||
switch node := value.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return nil
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
default:
|
||||
return nil // cannot navigate through other node types
|
||||
}
|
||||
}
|
||||
// branch based on final node type
|
||||
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||
case *tomlValue:
|
||||
return node.value
|
||||
default:
|
||||
return node
|
||||
}
|
||||
}
|
||||
|
||||
// GetArray returns the value at key in the Tree.
|
||||
// It returns a typed slice ([]string, []int64, etc.) if the key holds a homogeneous list.
|
||||
// Key is a dot-separated path (e.g. a.b.c) without single/double quoted strings.
|
||||
// Returns nil if the path does not exist in the tree.
|
||||
// If keys is of length zero, the current tree is returned.
|
||||
func (t *Tree) GetArray(key string) interface{} {
|
||||
if key == "" {
|
||||
return t
|
||||
}
|
||||
return t.GetArrayPath(strings.Split(key, "."))
|
||||
}
|
||||
|
||||
// GetArrayPath returns the element in the tree indicated by 'keys'.
|
||||
// If keys is of length zero, the current tree is returned.
|
||||
func (t *Tree) GetArrayPath(keys []string) interface{} {
|
||||
if len(keys) == 0 {
|
||||
return t
|
||||
}
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
value, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
return nil
|
||||
}
|
||||
switch node := value.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return nil
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
default:
|
||||
return nil // cannot navigate through other node types
|
||||
}
|
||||
}
|
||||
// branch based on final node type
|
||||
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||
case *tomlValue:
|
||||
switch n := node.value.(type) {
|
||||
case []interface{}:
|
||||
return getArray(n)
|
||||
default:
|
||||
return node.value
|
||||
}
|
||||
default:
|
||||
return node
|
||||
}
|
||||
}
|
||||
|
||||
// if homogeneous array, then return slice type object over []interface{}
|
||||
func getArray(n []interface{}) interface{} {
|
||||
var s []string
|
||||
var i64 []int64
|
||||
var f64 []float64
|
||||
var bl []bool
|
||||
for _, value := range n {
|
||||
switch v := value.(type) {
|
||||
case string:
|
||||
s = append(s, v)
|
||||
case int64:
|
||||
i64 = append(i64, v)
|
||||
case float64:
|
||||
f64 = append(f64, v)
|
||||
case bool:
|
||||
bl = append(bl, v)
|
||||
default:
|
||||
return n
|
||||
}
|
||||
}
|
||||
if len(s) == len(n) {
|
||||
return s
|
||||
} else if len(i64) == len(n) {
|
||||
return i64
|
||||
} else if len(f64) == len(n) {
|
||||
return f64
|
||||
} else if len(bl) == len(n) {
|
||||
return bl
|
||||
}
|
||||
return n
|
||||
}
|
||||
|
||||
// GetPosition returns the position of the given key.
|
||||
func (t *Tree) GetPosition(key string) Position {
|
||||
if key == "" {
|
||||
return t.position
|
||||
}
|
||||
return t.GetPositionPath(strings.Split(key, "."))
|
||||
}
|
||||
|
||||
// SetPositionPath sets the position of element in the tree indicated by 'keys'.
|
||||
// If keys is of length zero, the current tree position is set.
|
||||
func (t *Tree) SetPositionPath(keys []string, pos Position) {
|
||||
if len(keys) == 0 {
|
||||
t.position = pos
|
||||
return
|
||||
}
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
value, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
return
|
||||
}
|
||||
switch node := value.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
default:
|
||||
return
|
||||
}
|
||||
}
|
||||
// branch based on final node type
|
||||
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||
case *tomlValue:
|
||||
node.position = pos
|
||||
return
|
||||
case *Tree:
|
||||
node.position = pos
|
||||
return
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return
|
||||
}
|
||||
node[len(node)-1].position = pos
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
// GetPositionPath returns the element in the tree indicated by 'keys'.
|
||||
// If keys is of length zero, the current tree is returned.
|
||||
func (t *Tree) GetPositionPath(keys []string) Position {
|
||||
if len(keys) == 0 {
|
||||
return t.position
|
||||
}
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
value, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
return Position{0, 0}
|
||||
}
|
||||
switch node := value.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return Position{0, 0}
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
default:
|
||||
return Position{0, 0}
|
||||
}
|
||||
}
|
||||
// branch based on final node type
|
||||
switch node := subtree.values[keys[len(keys)-1]].(type) {
|
||||
case *tomlValue:
|
||||
return node.position
|
||||
case *Tree:
|
||||
return node.position
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
return Position{0, 0}
|
||||
}
|
||||
return node[len(node)-1].position
|
||||
default:
|
||||
return Position{0, 0}
|
||||
}
|
||||
}
|
||||
|
||||
// GetDefault works like Get but with a default value
|
||||
func (t *Tree) GetDefault(key string, def interface{}) interface{} {
|
||||
val := t.Get(key)
|
||||
if val == nil {
|
||||
return def
|
||||
}
|
||||
return val
|
||||
}
|
||||
|
||||
// SetOptions arguments are supplied to the SetWithOptions and SetPathWithOptions functions to modify marshalling behaviour.
|
||||
// The default values within the struct are valid default options.
|
||||
type SetOptions struct {
|
||||
Comment string
|
||||
Commented bool
|
||||
Multiline bool
|
||||
Literal bool
|
||||
}
|
||||
|
||||
// SetWithOptions is the same as Set, but allows you to provide formatting
|
||||
// instructions to the key, that will be used by Marshal().
|
||||
func (t *Tree) SetWithOptions(key string, opts SetOptions, value interface{}) {
|
||||
t.SetPathWithOptions(strings.Split(key, "."), opts, value)
|
||||
}
|
||||
|
||||
// SetPathWithOptions is the same as SetPath, but allows you to provide
|
||||
// formatting instructions to the key, that will be reused by Marshal().
|
||||
func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interface{}) {
|
||||
subtree := t
|
||||
for i, intermediateKey := range keys[:len(keys)-1] {
|
||||
nextTree, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
nextTree = newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||
subtree.values[intermediateKey] = nextTree // add new element here
|
||||
}
|
||||
switch node := nextTree.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
// create element if it does not exist
|
||||
node = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}))
|
||||
subtree.values[intermediateKey] = node
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
}
|
||||
}
|
||||
|
||||
var toInsert interface{}
|
||||
|
||||
switch v := value.(type) {
|
||||
case *Tree:
|
||||
v.comment = opts.Comment
|
||||
v.commented = opts.Commented
|
||||
toInsert = value
|
||||
case []*Tree:
|
||||
for i := range v {
|
||||
v[i].commented = opts.Commented
|
||||
}
|
||||
toInsert = value
|
||||
case *tomlValue:
|
||||
v.comment = opts.Comment
|
||||
v.commented = opts.Commented
|
||||
v.multiline = opts.Multiline
|
||||
v.literal = opts.Literal
|
||||
toInsert = v
|
||||
default:
|
||||
toInsert = &tomlValue{value: value,
|
||||
comment: opts.Comment,
|
||||
commented: opts.Commented,
|
||||
multiline: opts.Multiline,
|
||||
literal: opts.Literal,
|
||||
position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}}
|
||||
}
|
||||
|
||||
subtree.values[keys[len(keys)-1]] = toInsert
|
||||
}
|
||||
|
||||
// Set an element in the tree.
|
||||
// Key is a dot-separated path (e.g. a.b.c).
|
||||
// Creates all necessary intermediate trees, if needed.
|
||||
func (t *Tree) Set(key string, value interface{}) {
|
||||
t.SetWithComment(key, "", false, value)
|
||||
}
|
||||
|
||||
// SetWithComment is the same as Set, but allows you to provide comment
|
||||
// information to the key, that will be reused by Marshal().
|
||||
func (t *Tree) SetWithComment(key string, comment string, commented bool, value interface{}) {
|
||||
t.SetPathWithComment(strings.Split(key, "."), comment, commented, value)
|
||||
}
|
||||
|
||||
// SetPath sets an element in the tree.
|
||||
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
||||
// Creates all necessary intermediate trees, if needed.
|
||||
func (t *Tree) SetPath(keys []string, value interface{}) {
|
||||
t.SetPathWithComment(keys, "", false, value)
|
||||
}
|
||||
|
||||
// SetPathWithComment is the same as SetPath, but allows you to provide comment
|
||||
// information to the key, that will be reused by Marshal().
|
||||
func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) {
|
||||
t.SetPathWithOptions(keys, SetOptions{Comment: comment, Commented: commented}, value)
|
||||
}
|
||||
|
||||
// Delete removes a key from the tree.
|
||||
// Key is a dot-separated path (e.g. a.b.c).
|
||||
func (t *Tree) Delete(key string) error {
|
||||
keys, err := parseKey(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return t.DeletePath(keys)
|
||||
}
|
||||
|
||||
// DeletePath removes a key from the tree.
|
||||
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
||||
func (t *Tree) DeletePath(keys []string) error {
|
||||
keyLen := len(keys)
|
||||
if keyLen == 1 {
|
||||
delete(t.values, keys[0])
|
||||
return nil
|
||||
}
|
||||
tree := t.GetPath(keys[:keyLen-1])
|
||||
item := keys[keyLen-1]
|
||||
switch node := tree.(type) {
|
||||
case *Tree:
|
||||
delete(node.values, item)
|
||||
return nil
|
||||
}
|
||||
return errors.New("no such key to delete")
|
||||
}
|
||||
|
||||
// createSubTree takes a tree and a key and create the necessary intermediate
|
||||
// subtrees to create a subtree at that point. In-place.
|
||||
//
|
||||
// e.g. passing a.b.c will create (assuming tree is empty) tree[a], tree[a][b]
|
||||
// and tree[a][b][c]
|
||||
//
|
||||
// Returns nil on success, error object on failure
|
||||
func (t *Tree) createSubTree(keys []string, pos Position) error {
|
||||
subtree := t
|
||||
for i, intermediateKey := range keys {
|
||||
nextTree, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||
tree.position = pos
|
||||
tree.inline = subtree.inline
|
||||
subtree.values[intermediateKey] = tree
|
||||
nextTree = tree
|
||||
}
|
||||
|
||||
switch node := nextTree.(type) {
|
||||
case []*Tree:
|
||||
subtree = node[len(node)-1]
|
||||
case *Tree:
|
||||
subtree = node
|
||||
default:
|
||||
return fmt.Errorf("unknown type for path %s (%s): %T (%#v)",
|
||||
strings.Join(keys, "."), intermediateKey, nextTree, nextTree)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// LoadBytes creates a Tree from a []byte.
|
||||
func LoadBytes(b []byte) (tree *Tree, err error) {
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if _, ok := r.(runtime.Error); ok {
|
||||
panic(r)
|
||||
}
|
||||
err = fmt.Errorf("%s", r)
|
||||
}
|
||||
}()
|
||||
|
||||
if len(b) >= 4 && (hasUTF32BigEndianBOM4(b) || hasUTF32LittleEndianBOM4(b)) {
|
||||
b = b[4:]
|
||||
} else if len(b) >= 3 && hasUTF8BOM3(b) {
|
||||
b = b[3:]
|
||||
} else if len(b) >= 2 && (hasUTF16BigEndianBOM2(b) || hasUTF16LittleEndianBOM2(b)) {
|
||||
b = b[2:]
|
||||
}
|
||||
|
||||
tree = parseToml(lexToml(b))
|
||||
return
|
||||
}
|
||||
|
||||
func hasUTF16BigEndianBOM2(b []byte) bool {
|
||||
return b[0] == 0xFE && b[1] == 0xFF
|
||||
}
|
||||
|
||||
func hasUTF16LittleEndianBOM2(b []byte) bool {
|
||||
return b[0] == 0xFF && b[1] == 0xFE
|
||||
}
|
||||
|
||||
func hasUTF8BOM3(b []byte) bool {
|
||||
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||
}
|
||||
|
||||
func hasUTF32BigEndianBOM4(b []byte) bool {
|
||||
return b[0] == 0x00 && b[1] == 0x00 && b[2] == 0xFE && b[3] == 0xFF
|
||||
}
|
||||
|
||||
func hasUTF32LittleEndianBOM4(b []byte) bool {
|
||||
return b[0] == 0xFF && b[1] == 0xFE && b[2] == 0x00 && b[3] == 0x00
|
||||
}
|
||||
|
||||
// LoadReader creates a Tree from any io.Reader.
|
||||
func LoadReader(reader io.Reader) (tree *Tree, err error) {
|
||||
inputBytes, err := ioutil.ReadAll(reader)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
tree, err = LoadBytes(inputBytes)
|
||||
return
|
||||
}
|
||||
|
||||
// Load creates a Tree from a string.
|
||||
func Load(content string) (tree *Tree, err error) {
|
||||
return LoadBytes([]byte(content))
|
||||
}
|
||||
|
||||
// LoadFile creates a Tree from a file.
|
||||
func LoadFile(path string) (tree *Tree, err error) {
|
||||
file, err := os.Open(path)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer file.Close()
|
||||
return LoadReader(file)
|
||||
}
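For reference, a minimal usage sketch of the v1 Tree API defined in the file above (Load, Get, Has, Set), which this commit removes from the vendor tree. The import path github.com/pelletier/go-toml is assumed (pre-v2) and none of this appears in the diff itself:

package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml" // assumed v1 import path
)

func main() {
	// Load parses a TOML document into a *toml.Tree.
	tree, err := toml.Load("[server]\nhost = \"localhost\"\nport = 8080\n")
	if err != nil {
		panic(err)
	}

	// Get walks dot-separated keys; Has reports whether a key exists.
	fmt.Println(tree.Get("server.host")) // localhost
	fmt.Println(tree.Has("server.tls"))  // false

	// Set creates any missing intermediate tables.
	tree.Set("server.tls", true)
}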
@ -1,71 +0,0 @@
|
|||
package toml
|
||||
|
||||
// PubTOMLValue wrapping tomlValue in order to access all properties from outside.
|
||||
type PubTOMLValue = tomlValue
|
||||
|
||||
func (ptv *PubTOMLValue) Value() interface{} {
|
||||
return ptv.value
|
||||
}
|
||||
func (ptv *PubTOMLValue) Comment() string {
|
||||
return ptv.comment
|
||||
}
|
||||
func (ptv *PubTOMLValue) Commented() bool {
|
||||
return ptv.commented
|
||||
}
|
||||
func (ptv *PubTOMLValue) Multiline() bool {
|
||||
return ptv.multiline
|
||||
}
|
||||
func (ptv *PubTOMLValue) Position() Position {
|
||||
return ptv.position
|
||||
}
|
||||
|
||||
func (ptv *PubTOMLValue) SetValue(v interface{}) {
|
||||
ptv.value = v
|
||||
}
|
||||
func (ptv *PubTOMLValue) SetComment(s string) {
|
||||
ptv.comment = s
|
||||
}
|
||||
func (ptv *PubTOMLValue) SetCommented(c bool) {
|
||||
ptv.commented = c
|
||||
}
|
||||
func (ptv *PubTOMLValue) SetMultiline(m bool) {
|
||||
ptv.multiline = m
|
||||
}
|
||||
func (ptv *PubTOMLValue) SetPosition(p Position) {
|
||||
ptv.position = p
|
||||
}
|
||||
|
||||
// PubTree wrapping Tree in order to access all properties from outside.
|
||||
type PubTree = Tree
|
||||
|
||||
func (pt *PubTree) Values() map[string]interface{} {
|
||||
return pt.values
|
||||
}
|
||||
|
||||
func (pt *PubTree) Comment() string {
|
||||
return pt.comment
|
||||
}
|
||||
|
||||
func (pt *PubTree) Commented() bool {
|
||||
return pt.commented
|
||||
}
|
||||
|
||||
func (pt *PubTree) Inline() bool {
|
||||
return pt.inline
|
||||
}
|
||||
|
||||
func (pt *PubTree) SetValues(v map[string]interface{}) {
|
||||
pt.values = v
|
||||
}
|
||||
|
||||
func (pt *PubTree) SetComment(c string) {
|
||||
pt.comment = c
|
||||
}
|
||||
|
||||
func (pt *PubTree) SetCommented(c bool) {
|
||||
pt.commented = c
|
||||
}
|
||||
|
||||
func (pt *PubTree) SetInline(i bool) {
|
||||
pt.inline = i
|
||||
}
@ -1,155 +0,0 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
"time"
|
||||
)
|
||||
|
||||
var kindToType = [reflect.String + 1]reflect.Type{
|
||||
reflect.Bool: reflect.TypeOf(true),
|
||||
reflect.String: reflect.TypeOf(""),
|
||||
reflect.Float32: reflect.TypeOf(float64(1)),
|
||||
reflect.Float64: reflect.TypeOf(float64(1)),
|
||||
reflect.Int: reflect.TypeOf(int64(1)),
|
||||
reflect.Int8: reflect.TypeOf(int64(1)),
|
||||
reflect.Int16: reflect.TypeOf(int64(1)),
|
||||
reflect.Int32: reflect.TypeOf(int64(1)),
|
||||
reflect.Int64: reflect.TypeOf(int64(1)),
|
||||
reflect.Uint: reflect.TypeOf(uint64(1)),
|
||||
reflect.Uint8: reflect.TypeOf(uint64(1)),
|
||||
reflect.Uint16: reflect.TypeOf(uint64(1)),
|
||||
reflect.Uint32: reflect.TypeOf(uint64(1)),
|
||||
reflect.Uint64: reflect.TypeOf(uint64(1)),
|
||||
}
|
||||
|
||||
// typeFor returns a reflect.Type for a reflect.Kind, or nil if none is found.
|
||||
// supported values:
|
||||
// string, bool, int64, uint64, float64, time.Time, int, int8, int16, int32, uint, uint8, uint16, uint32, float32
|
||||
func typeFor(k reflect.Kind) reflect.Type {
|
||||
if k > 0 && int(k) < len(kindToType) {
|
||||
return kindToType[k]
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func simpleValueCoercion(object interface{}) (interface{}, error) {
|
||||
switch original := object.(type) {
|
||||
case string, bool, int64, uint64, float64, time.Time:
|
||||
return original, nil
|
||||
case int:
|
||||
return int64(original), nil
|
||||
case int8:
|
||||
return int64(original), nil
|
||||
case int16:
|
||||
return int64(original), nil
|
||||
case int32:
|
||||
return int64(original), nil
|
||||
case uint:
|
||||
return uint64(original), nil
|
||||
case uint8:
|
||||
return uint64(original), nil
|
||||
case uint16:
|
||||
return uint64(original), nil
|
||||
case uint32:
|
||||
return uint64(original), nil
|
||||
case float32:
|
||||
return float64(original), nil
|
||||
case fmt.Stringer:
|
||||
return original.String(), nil
|
||||
case []interface{}:
|
||||
value := reflect.ValueOf(original)
|
||||
length := value.Len()
|
||||
arrayValue := reflect.MakeSlice(value.Type(), 0, length)
|
||||
for i := 0; i < length; i++ {
|
||||
val := value.Index(i).Interface()
|
||||
simpleValue, err := simpleValueCoercion(val)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
|
||||
}
|
||||
return arrayValue.Interface(), nil
|
||||
default:
|
||||
return nil, fmt.Errorf("cannot convert type %T to Tree", object)
|
||||
}
|
||||
}
|
||||
|
||||
func sliceToTree(object interface{}) (interface{}, error) {
|
||||
// arrays are a bit tricky, since they can represent either a
|
||||
// collection of simple values, which is represented by one
|
||||
// *tomlValue, or an array of tables, which is represented by an
|
||||
// array of *Tree.
|
||||
|
||||
// holding the assumption that this function is called from toTree only when value.Kind() is Array or Slice
|
||||
value := reflect.ValueOf(object)
|
||||
insideType := value.Type().Elem()
|
||||
length := value.Len()
|
||||
if length > 0 {
|
||||
insideType = reflect.ValueOf(value.Index(0).Interface()).Type()
|
||||
}
|
||||
if insideType.Kind() == reflect.Map {
|
||||
// this is considered as an array of tables
|
||||
tablesArray := make([]*Tree, 0, length)
|
||||
for i := 0; i < length; i++ {
|
||||
table := value.Index(i)
|
||||
tree, err := toTree(table.Interface())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
tablesArray = append(tablesArray, tree.(*Tree))
|
||||
}
|
||||
return tablesArray, nil
|
||||
}
|
||||
|
||||
sliceType := typeFor(insideType.Kind())
|
||||
if sliceType == nil {
|
||||
sliceType = insideType
|
||||
}
|
||||
|
||||
arrayValue := reflect.MakeSlice(reflect.SliceOf(sliceType), 0, length)
|
||||
|
||||
for i := 0; i < length; i++ {
|
||||
val := value.Index(i).Interface()
|
||||
simpleValue, err := simpleValueCoercion(val)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
arrayValue = reflect.Append(arrayValue, reflect.ValueOf(simpleValue))
|
||||
}
|
||||
return &tomlValue{value: arrayValue.Interface(), position: Position{}}, nil
|
||||
}
|
||||
|
||||
func toTree(object interface{}) (interface{}, error) {
|
||||
value := reflect.ValueOf(object)
|
||||
|
||||
if value.Kind() == reflect.Map {
|
||||
values := map[string]interface{}{}
|
||||
keys := value.MapKeys()
|
||||
for _, key := range keys {
|
||||
if key.Kind() != reflect.String {
|
||||
if _, ok := key.Interface().(string); !ok {
|
||||
return nil, fmt.Errorf("map key needs to be a string, not %T (%v)", key.Interface(), key.Kind())
|
||||
}
|
||||
}
|
||||
|
||||
v := value.MapIndex(key)
|
||||
newValue, err := toTree(v.Interface())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
values[key.String()] = newValue
|
||||
}
|
||||
return &Tree{values: values, position: Position{}}, nil
|
||||
}
|
||||
|
||||
if value.Kind() == reflect.Array || value.Kind() == reflect.Slice {
|
||||
return sliceToTree(object)
|
||||
}
|
||||
|
||||
simpleValue, err := simpleValueCoercion(object)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return &tomlValue{value: simpleValue, position: Position{}}, nil
|
||||
}
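The coercion helpers above (simpleValueCoercion, sliceToTree, toTree) back TreeFromMap from the earlier file. A hedged sketch of that conversion, again assuming the pre-v2 import path:

package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml" // assumed v1 import path
)

func main() {
	m := map[string]interface{}{
		"name":   "demo",
		"ports":  []interface{}{80, 443}, // widened to []int64 via simpleValueCoercion
		"limits": map[string]interface{}{"cpu": 2},
	}

	tree, err := toml.TreeFromMap(m)
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.Get("limits.cpu")) // int64(2)
}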
@ -1,552 +0,0 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
type valueComplexity int
|
||||
|
||||
const (
|
||||
valueSimple valueComplexity = iota + 1
|
||||
valueComplex
|
||||
)
|
||||
|
||||
type sortNode struct {
|
||||
key string
|
||||
complexity valueComplexity
|
||||
}
|
||||
|
||||
// Encodes a string to a TOML-compliant multi-line string value
|
||||
// This function is a clone of the existing encodeTomlString function, except that whitespace characters
|
||||
// are preserved. Quotation marks and backslashes are also not escaped.
|
||||
func encodeMultilineTomlString(value string, commented string) string {
|
||||
var b bytes.Buffer
|
||||
adjacentQuoteCount := 0
|
||||
|
||||
b.WriteString(commented)
|
||||
for i, rr := range value {
|
||||
if rr != '"' {
|
||||
adjacentQuoteCount = 0
|
||||
} else {
|
||||
adjacentQuoteCount++
|
||||
}
|
||||
switch rr {
|
||||
case '\b':
|
||||
b.WriteString(`\b`)
|
||||
case '\t':
|
||||
b.WriteString("\t")
|
||||
case '\n':
|
||||
b.WriteString("\n" + commented)
|
||||
case '\f':
|
||||
b.WriteString(`\f`)
|
||||
case '\r':
|
||||
b.WriteString("\r")
|
||||
case '"':
|
||||
if adjacentQuoteCount >= 3 || i == len(value)-1 {
|
||||
adjacentQuoteCount = 0
|
||||
b.WriteString(`\"`)
|
||||
} else {
|
||||
b.WriteString(`"`)
|
||||
}
|
||||
case '\\':
|
||||
b.WriteString(`\`)
|
||||
default:
|
||||
intRr := uint16(rr)
|
||||
if intRr < 0x001F {
|
||||
b.WriteString(fmt.Sprintf("\\u%0.4X", intRr))
|
||||
} else {
|
||||
b.WriteRune(rr)
|
||||
}
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
// Encodes a string to a TOML-compliant string value
|
||||
func encodeTomlString(value string) string {
|
||||
var b bytes.Buffer
|
||||
|
||||
for _, rr := range value {
|
||||
switch rr {
|
||||
case '\b':
|
||||
b.WriteString(`\b`)
|
||||
case '\t':
|
||||
b.WriteString(`\t`)
|
||||
case '\n':
|
||||
b.WriteString(`\n`)
|
||||
case '\f':
|
||||
b.WriteString(`\f`)
|
||||
case '\r':
|
||||
b.WriteString(`\r`)
|
||||
case '"':
|
||||
b.WriteString(`\"`)
|
||||
case '\\':
|
||||
b.WriteString(`\\`)
|
||||
default:
|
||||
intRr := uint16(rr)
|
||||
if intRr < 0x001F {
|
||||
b.WriteString(fmt.Sprintf("\\u%0.4X", intRr))
|
||||
} else {
|
||||
b.WriteRune(rr)
|
||||
}
|
||||
}
|
||||
}
|
||||
return b.String()
|
||||
}
|
||||
|
||||
func tomlTreeStringRepresentation(t *Tree, ord MarshalOrder) (string, error) {
|
||||
var orderedVals []sortNode
|
||||
switch ord {
|
||||
case OrderPreserve:
|
||||
orderedVals = sortByLines(t)
|
||||
default:
|
||||
orderedVals = sortAlphabetical(t)
|
||||
}
|
||||
|
||||
var values []string
|
||||
for _, node := range orderedVals {
|
||||
k := node.key
|
||||
v := t.values[k]
|
||||
|
||||
repr, err := tomlValueStringRepresentation(v, "", "", ord, false)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
values = append(values, quoteKeyIfNeeded(k)+" = "+repr)
|
||||
}
|
||||
return "{ " + strings.Join(values, ", ") + " }", nil
|
||||
}
|
||||
|
||||
func tomlValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) {
|
||||
// this interface check unwraps the *tomlValue wrapper passed down from the writeTo
// functions, so that this function can see the formatting options attached to the value.
|
||||
tv, ok := v.(*tomlValue)
|
||||
if ok {
|
||||
v = tv.value
|
||||
} else {
|
||||
tv = &tomlValue{}
|
||||
}
|
||||
|
||||
switch value := v.(type) {
|
||||
case uint64:
|
||||
return strconv.FormatUint(value, 10), nil
|
||||
case int64:
|
||||
return strconv.FormatInt(value, 10), nil
|
||||
case float64:
|
||||
// Default bit length is full 64
|
||||
bits := 64
|
||||
// Float panics if nan is used
|
||||
if !math.IsNaN(value) {
|
||||
// if 32 bit accuracy is enough to exactly show, use 32
|
||||
_, acc := big.NewFloat(value).Float32()
|
||||
if acc == big.Exact {
|
||||
bits = 32
|
||||
}
|
||||
}
|
||||
if math.Trunc(value) == value {
|
||||
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, bits)), nil
|
||||
}
|
||||
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, bits)), nil
|
||||
case string:
|
||||
if tv.multiline {
|
||||
if tv.literal {
|
||||
b := strings.Builder{}
|
||||
b.WriteString("'''\n")
|
||||
b.Write([]byte(value))
|
||||
b.WriteString("\n'''")
|
||||
return b.String(), nil
|
||||
} else {
|
||||
return "\"\"\"\n" + encodeMultilineTomlString(value, commented) + "\"\"\"", nil
|
||||
}
|
||||
}
|
||||
return "\"" + encodeTomlString(value) + "\"", nil
|
||||
case []byte:
|
||||
b, _ := v.([]byte)
|
||||
return string(b), nil
|
||||
case bool:
|
||||
if value {
|
||||
return "true", nil
|
||||
}
|
||||
return "false", nil
|
||||
case time.Time:
|
||||
return value.Format(time.RFC3339), nil
|
||||
case LocalDate:
|
||||
return value.String(), nil
|
||||
case LocalDateTime:
|
||||
return value.String(), nil
|
||||
case LocalTime:
|
||||
return value.String(), nil
|
||||
case *Tree:
|
||||
return tomlTreeStringRepresentation(value, ord)
|
||||
case nil:
|
||||
return "", nil
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(v)
|
||||
|
||||
if rv.Kind() == reflect.Slice {
|
||||
var values []string
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
item := rv.Index(i).Interface()
|
||||
itemRepr, err := tomlValueStringRepresentation(item, commented, indent, ord, arraysOneElementPerLine)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
values = append(values, itemRepr)
|
||||
}
|
||||
if arraysOneElementPerLine && len(values) > 1 {
|
||||
stringBuffer := bytes.Buffer{}
|
||||
valueIndent := indent + ` ` // TODO: move that to a shared encoder state
|
||||
|
||||
stringBuffer.WriteString("[\n")
|
||||
|
||||
for _, value := range values {
|
||||
stringBuffer.WriteString(valueIndent)
|
||||
stringBuffer.WriteString(commented + value)
|
||||
stringBuffer.WriteString(`,`)
|
||||
stringBuffer.WriteString("\n")
|
||||
}
|
||||
|
||||
stringBuffer.WriteString(indent + commented + "]")
|
||||
|
||||
return stringBuffer.String(), nil
|
||||
}
|
||||
return "[" + strings.Join(values, ", ") + "]", nil
|
||||
}
|
||||
return "", fmt.Errorf("unsupported value type %T: %v", v, v)
|
||||
}
|
||||
|
||||
func getTreeArrayLine(trees []*Tree) (line int) {
|
||||
// Prevent returning 0 for empty trees
|
||||
line = int(^uint(0) >> 1)
|
||||
// get lowest line number >= 0
|
||||
for _, tv := range trees {
|
||||
if tv.position.Line < line || line == 0 {
|
||||
line = tv.position.Line
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
func sortByLines(t *Tree) (vals []sortNode) {
|
||||
var (
|
||||
line int
|
||||
lines []int
|
||||
tv *Tree
|
||||
tom *tomlValue
|
||||
node sortNode
|
||||
)
|
||||
vals = make([]sortNode, 0)
|
||||
m := make(map[int]sortNode)
|
||||
|
||||
for k := range t.values {
|
||||
v := t.values[k]
|
||||
switch v.(type) {
|
||||
case *Tree:
|
||||
tv = v.(*Tree)
|
||||
line = tv.position.Line
|
||||
node = sortNode{key: k, complexity: valueComplex}
|
||||
case []*Tree:
|
||||
line = getTreeArrayLine(v.([]*Tree))
|
||||
node = sortNode{key: k, complexity: valueComplex}
|
||||
default:
|
||||
tom = v.(*tomlValue)
|
||||
line = tom.position.Line
|
||||
node = sortNode{key: k, complexity: valueSimple}
|
||||
}
|
||||
lines = append(lines, line)
|
||||
vals = append(vals, node)
|
||||
m[line] = node
|
||||
}
|
||||
sort.Ints(lines)
|
||||
|
||||
for i, line := range lines {
|
||||
vals[i] = m[line]
|
||||
}
|
||||
|
||||
return vals
|
||||
}
|
||||
|
||||
func sortAlphabetical(t *Tree) (vals []sortNode) {
|
||||
var (
|
||||
node sortNode
|
||||
simpVals []string
|
||||
compVals []string
|
||||
)
|
||||
vals = make([]sortNode, 0)
|
||||
m := make(map[string]sortNode)
|
||||
|
||||
for k := range t.values {
|
||||
v := t.values[k]
|
||||
switch v.(type) {
|
||||
case *Tree, []*Tree:
|
||||
node = sortNode{key: k, complexity: valueComplex}
|
||||
compVals = append(compVals, node.key)
|
||||
default:
|
||||
node = sortNode{key: k, complexity: valueSimple}
|
||||
simpVals = append(simpVals, node.key)
|
||||
}
|
||||
vals = append(vals, node)
|
||||
m[node.key] = node
|
||||
}
|
||||
|
||||
// Simples first to match previous implementation
|
||||
sort.Strings(simpVals)
|
||||
i := 0
|
||||
for _, key := range simpVals {
|
||||
vals[i] = m[key]
|
||||
i++
|
||||
}
|
||||
|
||||
sort.Strings(compVals)
|
||||
for _, key := range compVals {
|
||||
vals[i] = m[key]
|
||||
i++
|
||||
}
|
||||
|
||||
return vals
|
||||
}
|
||||
|
||||
func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
|
||||
return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical, " ", false, false)
|
||||
}
|
||||
|
||||
func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord MarshalOrder, indentString string, compactComments, parentCommented bool) (int64, error) {
|
||||
var orderedVals []sortNode
|
||||
|
||||
switch ord {
|
||||
case OrderPreserve:
|
||||
orderedVals = sortByLines(t)
|
||||
default:
|
||||
orderedVals = sortAlphabetical(t)
|
||||
}
|
||||
|
||||
for _, node := range orderedVals {
|
||||
switch node.complexity {
|
||||
case valueComplex:
|
||||
k := node.key
|
||||
v := t.values[k]
|
||||
|
||||
combinedKey := quoteKeyIfNeeded(k)
|
||||
if keyspace != "" {
|
||||
combinedKey = keyspace + "." + combinedKey
|
||||
}
|
||||
|
||||
switch node := v.(type) {
|
||||
// node has to be of those two types given how keys are sorted above
|
||||
case *Tree:
|
||||
tv, ok := t.values[k].(*Tree)
|
||||
if !ok {
|
||||
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
|
||||
}
|
||||
if tv.comment != "" {
|
||||
comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1)
|
||||
start := "# "
|
||||
if strings.HasPrefix(comment, "#") {
|
||||
start = ""
|
||||
}
|
||||
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment)
|
||||
bytesCount += int64(writtenBytesCountComment)
|
||||
if errc != nil {
|
||||
return bytesCount, errc
|
||||
}
|
||||
}
|
||||
|
||||
var commented string
|
||||
if parentCommented || t.commented || tv.commented {
|
||||
commented = "# "
|
||||
}
|
||||
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
|
||||
bytesCount += int64(writtenBytesCount)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
bytesCount, err = node.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || tv.commented)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
case []*Tree:
|
||||
for _, subTree := range node {
|
||||
var commented string
|
||||
if parentCommented || t.commented || subTree.commented {
|
||||
commented = "# "
|
||||
}
|
||||
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
|
||||
bytesCount += int64(writtenBytesCount)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
|
||||
bytesCount, err = subTree.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, compactComments, parentCommented || t.commented || subTree.commented)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
}
|
||||
}
|
||||
default: // Simple
|
||||
k := node.key
|
||||
v, ok := t.values[k].(*tomlValue)
|
||||
if !ok {
|
||||
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
|
||||
}
|
||||
|
||||
var commented string
|
||||
if parentCommented || t.commented || v.commented {
|
||||
commented = "# "
|
||||
}
|
||||
repr, err := tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
|
||||
if v.comment != "" {
|
||||
comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1)
|
||||
start := "# "
|
||||
if strings.HasPrefix(comment, "#") {
|
||||
start = ""
|
||||
}
|
||||
if !compactComments {
|
||||
writtenBytesCountComment, errc := writeStrings(w, "\n")
|
||||
bytesCount += int64(writtenBytesCountComment)
|
||||
if errc != nil {
|
||||
return bytesCount, errc
|
||||
}
|
||||
}
|
||||
writtenBytesCountComment, errc := writeStrings(w, indent, start, comment, "\n")
|
||||
bytesCount += int64(writtenBytesCountComment)
|
||||
if errc != nil {
|
||||
return bytesCount, errc
|
||||
}
|
||||
}
|
||||
|
||||
quotedKey := quoteKeyIfNeeded(k)
|
||||
writtenBytesCount, err := writeStrings(w, indent, commented, quotedKey, " = ", repr, "\n")
|
||||
bytesCount += int64(writtenBytesCount)
|
||||
if err != nil {
|
||||
return bytesCount, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return bytesCount, nil
|
||||
}
|
||||
|
||||
// quote a key if it does not fit the bare key format (A-Za-z0-9_-)
|
||||
// quoted keys use the same rules as strings
|
||||
func quoteKeyIfNeeded(k string) string {
|
||||
// when encoding a map with the 'quoteMapKeys' option enabled, the tree will contain
|
||||
// keys that have already been quoted.
|
||||
// not an ideal situation, but a good enough stopgap.
|
||||
if len(k) >= 2 && k[0] == '"' && k[len(k)-1] == '"' {
|
||||
return k
|
||||
}
|
||||
isBare := true
|
||||
for _, r := range k {
|
||||
if !isValidBareChar(r) {
|
||||
isBare = false
|
||||
break
|
||||
}
|
||||
}
|
||||
if isBare {
|
||||
return k
|
||||
}
|
||||
return quoteKey(k)
|
||||
}
|
||||
|
||||
func quoteKey(k string) string {
|
||||
return "\"" + encodeTomlString(k) + "\""
|
||||
}
|
||||
|
||||
func writeStrings(w io.Writer, s ...string) (int, error) {
|
||||
var n int
|
||||
for i := range s {
|
||||
b, err := io.WriteString(w, s[i])
|
||||
n += b
|
||||
if err != nil {
|
||||
return n, err
|
||||
}
|
||||
}
|
||||
return n, nil
|
||||
}
|
||||
|
||||
// WriteTo encodes the Tree as TOML and writes it to the writer w.
// It returns the number of bytes written on success, or an error if the write fails.
|
||||
func (t *Tree) WriteTo(w io.Writer) (int64, error) {
|
||||
return t.writeTo(w, "", "", 0, false)
|
||||
}
|
||||
|
||||
// ToTomlString generates a human-readable representation of the current tree.
|
||||
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
|
||||
// If the conversion cannot be performed, ToTomlString returns a non-nil error.
|
||||
func (t *Tree) ToTomlString() (string, error) {
|
||||
b, err := t.Marshal()
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
return string(b), nil
|
||||
}
|
||||
|
||||
// String generates a human-readable representation of the current tree.
|
||||
// Alias of ToTomlString. Present to implement the fmt.Stringer interface.
|
||||
func (t *Tree) String() string {
|
||||
result, _ := t.ToTomlString()
|
||||
return result
|
||||
}
|
||||
|
||||
// ToMap recursively generates a representation of the tree using Go built-in structures.
|
||||
// The following types are used:
|
||||
//
|
||||
// * bool
|
||||
// * float64
|
||||
// * int64
|
||||
// * string
|
||||
// * uint64
|
||||
// * time.Time
|
||||
// * map[string]interface{} (where interface{} is any of this list)
|
||||
// * []interface{} (where interface{} is any of this list)
|
||||
func (t *Tree) ToMap() map[string]interface{} {
|
||||
result := map[string]interface{}{}
|
||||
|
||||
for k, v := range t.values {
|
||||
switch node := v.(type) {
|
||||
case []*Tree:
|
||||
var array []interface{}
|
||||
for _, item := range node {
|
||||
array = append(array, item.ToMap())
|
||||
}
|
||||
result[k] = array
|
||||
case *Tree:
|
||||
result[k] = node.ToMap()
|
||||
case *tomlValue:
|
||||
result[k] = tomlValueToGo(node.value)
|
||||
}
|
||||
}
|
||||
return result
|
||||
}
|
||||
|
||||
func tomlValueToGo(v interface{}) interface{} {
|
||||
if tree, ok := v.(*Tree); ok {
|
||||
return tree.ToMap()
|
||||
}
|
||||
|
||||
rv := reflect.ValueOf(v)
|
||||
|
||||
if rv.Kind() != reflect.Slice {
|
||||
return v
|
||||
}
|
||||
values := make([]interface{}, rv.Len())
|
||||
for i := 0; i < rv.Len(); i++ {
|
||||
item := rv.Index(i).Interface()
|
||||
values[i] = tomlValueToGo(item)
|
||||
}
|
||||
return values
|
||||
}
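To round out the writer code above, a short sketch of the three serialization entry points it provides (ToTomlString, WriteTo, ToMap), with the same assumed v1 import path:

package main

import (
	"fmt"
	"os"

	toml "github.com/pelletier/go-toml" // assumed v1 import path
)

func main() {
	tree, err := toml.Load("title = \"demo\"\n[owner]\nname = \"knative\"\n")
	if err != nil {
		panic(err)
	}

	// ToTomlString re-serializes the tree; keys are sorted alphabetically by default.
	s, err := tree.ToTomlString()
	if err != nil {
		panic(err)
	}
	fmt.Print(s)

	// WriteTo streams the same representation to any io.Writer.
	if _, err := tree.WriteTo(os.Stdout); err != nil {
		panic(err)
	}

	// ToMap converts back to plain Go maps and slices.
	fmt.Println(tree.ToMap()["owner"])
}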
@ -1,6 +0,0 @@
|
|||
package toml
|
||||
|
||||
// ValueStringRepresentation transforms an interface{} value into its toml string representation.
|
||||
func ValueStringRepresentation(v interface{}, commented string, indent string, ord MarshalOrder, arraysOneElementPerLine bool) (string, error) {
|
||||
return tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine)
|
||||
}
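ValueStringRepresentation above simply re-exports the internal formatter shown earlier. A one-value sketch of what it produces, assuming the same v1 import path:

package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml" // assumed v1 import path
)

func main() {
	// Renders a single value the same way the tree writer would.
	s, err := toml.ValueStringRepresentation(int64(42), "", "", toml.OrderAlphabetical, false)
	if err != nil {
		panic(err)
	}
	fmt.Println(s) // 42
}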
@ -140,6 +140,17 @@ fmt.Println(string(b))
|
|||
|
||||
[marshal]: https://pkg.go.dev/github.com/pelletier/go-toml/v2#Marshal
|
||||
|
||||
## Unstable API
|
||||
|
||||
This API does not yet follow the backward compatibility guarantees of this
library. It provides early access to features that may have rough edges or an
API subject to change.
|
||||
|
||||
### Parser
|
||||
|
||||
Parser is the unstable API that allows iterative parsing of a TOML document at
|
||||
the AST level. See https://pkg.go.dev/github.com/pelletier/go-toml/v2/unstable.
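A minimal sketch of driving that parser, assuming the unstable package exposes Parser with Reset, NextExpression, Expression and Error as described at the link above (not verified against this exact version):

package main

import (
	"fmt"

	"github.com/pelletier/go-toml/v2/unstable"
)

func main() {
	doc := []byte("a = 1\n[table]\nb = \"two\"\n")

	var p unstable.Parser
	p.Reset(doc)

	// Each NextExpression yields one top-level expression (key/value,
	// table header, array-table header) as an AST node.
	for p.NextExpression() {
		e := p.Expression()
		fmt.Printf("%s %q\n", e.Kind, e.Data)
	}
	if err := p.Error(); err != nil {
		panic(err)
	}
}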
|
||||
|
||||
## Benchmarks
|
||||
|
||||
Execution time speedup compared to other Go TOML libraries:
@ -542,7 +553,7 @@ complete solutions exist out there.
|
|||
|
||||
## Versioning
|
||||
|
||||
Go-toml follows [Semantic Versioning](http://semver.org/). The supported version
|
||||
Go-toml follows [Semantic Versioning](https://semver.org). The supported version
|
||||
of [TOML](https://github.com/toml-lang/toml) is indicated at the beginning of
|
||||
this document. The last two major versions of Go are supported
|
||||
(see [Go Release Policy](https://golang.org/doc/devel/release.html#policy)).
@ -77,8 +77,9 @@ cover() {
|
|||
|
||||
pushd "$dir"
|
||||
go test -covermode=atomic -coverpkg=./... -coverprofile=coverage.out.tmp ./...
|
||||
cat coverage.out.tmp | grep -v testsuite | grep -v tomltestgen | grep -v gotoml-test-decoder > coverage.out
|
||||
cat coverage.out.tmp | grep -v fuzz | grep -v testsuite | grep -v tomltestgen | grep -v gotoml-test-decoder > coverage.out
|
||||
go tool cover -func=coverage.out
|
||||
echo "Coverage profile for ${branch}: ${dir}/coverage.out" >&2
|
||||
popd
|
||||
|
||||
if [ "${branch}" != "HEAD" ]; then
|
||||
@ -5,6 +5,8 @@ import (
|
|||
"math"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
func parseInteger(b []byte) (int64, error) {
|
||||
|
|
@ -32,7 +34,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||
var date LocalDate
|
||||
|
||||
if len(b) != 10 || b[4] != '-' || b[7] != '-' {
|
||||
return date, newDecodeError(b, "dates are expected to have the format YYYY-MM-DD")
|
||||
return date, unstable.NewParserError(b, "dates are expected to have the format YYYY-MM-DD")
|
||||
}
|
||||
|
||||
var err error
|
||||
|
|
@ -53,7 +55,7 @@ func parseLocalDate(b []byte) (LocalDate, error) {
|
|||
}
|
||||
|
||||
if !isValidDate(date.Year, date.Month, date.Day) {
|
||||
return LocalDate{}, newDecodeError(b, "impossible date")
|
||||
return LocalDate{}, unstable.NewParserError(b, "impossible date")
|
||||
}
|
||||
|
||||
return date, nil
|
||||
|
|
@ -64,7 +66,7 @@ func parseDecimalDigits(b []byte) (int, error) {
|
|||
|
||||
for i, c := range b {
|
||||
if c < '0' || c > '9' {
|
||||
return 0, newDecodeError(b[i:i+1], "expected digit (0-9)")
|
||||
return 0, unstable.NewParserError(b[i:i+1], "expected digit (0-9)")
|
||||
}
|
||||
v *= 10
|
||||
v += int(c - '0')
|
||||
|
|
@ -97,7 +99,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||
} else {
|
||||
const dateTimeByteLen = 6
|
||||
if len(b) != dateTimeByteLen {
|
||||
return time.Time{}, newDecodeError(b, "invalid date-time timezone")
|
||||
return time.Time{}, unstable.NewParserError(b, "invalid date-time timezone")
|
||||
}
|
||||
var direction int
|
||||
switch b[0] {
|
||||
|
|
@ -106,11 +108,11 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||
case '+':
|
||||
direction = +1
|
||||
default:
|
||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset character")
|
||||
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset character")
|
||||
}
|
||||
|
||||
if b[3] != ':' {
|
||||
return time.Time{}, newDecodeError(b[3:4], "expected a : separator")
|
||||
return time.Time{}, unstable.NewParserError(b[3:4], "expected a : separator")
|
||||
}
|
||||
|
||||
hours, err := parseDecimalDigits(b[1:3])
|
||||
|
|
@ -118,7 +120,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||
return time.Time{}, err
|
||||
}
|
||||
if hours > 23 {
|
||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset hours")
|
||||
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset hours")
|
||||
}
|
||||
|
||||
minutes, err := parseDecimalDigits(b[4:6])
|
||||
|
|
@ -126,7 +128,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||
return time.Time{}, err
|
||||
}
|
||||
if minutes > 59 {
|
||||
return time.Time{}, newDecodeError(b[:1], "invalid timezone offset minutes")
|
||||
return time.Time{}, unstable.NewParserError(b[:1], "invalid timezone offset minutes")
|
||||
}
|
||||
|
||||
seconds := direction * (hours*3600 + minutes*60)
|
||||
|
|
@ -139,7 +141,7 @@ func parseDateTime(b []byte) (time.Time, error) {
|
|||
}
|
||||
|
||||
if len(b) > 0 {
|
||||
return time.Time{}, newDecodeError(b, "extra bytes at the end of the timezone")
|
||||
return time.Time{}, unstable.NewParserError(b, "extra bytes at the end of the timezone")
|
||||
}
|
||||
|
||||
t := time.Date(
|
||||
|
|
@ -160,7 +162,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||
|
||||
const localDateTimeByteMinLen = 11
|
||||
if len(b) < localDateTimeByteMinLen {
|
||||
return dt, nil, newDecodeError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
||||
return dt, nil, unstable.NewParserError(b, "local datetimes are expected to have the format YYYY-MM-DDTHH:MM:SS[.NNNNNNNNN]")
|
||||
}
|
||||
|
||||
date, err := parseLocalDate(b[:10])
|
||||
|
|
@ -171,7 +173,7 @@ func parseLocalDateTime(b []byte) (LocalDateTime, []byte, error) {
|
|||
|
||||
sep := b[10]
|
||||
if sep != 'T' && sep != ' ' && sep != 't' {
|
||||
return dt, nil, newDecodeError(b[10:11], "datetime separator is expected to be T or a space")
|
||||
return dt, nil, unstable.NewParserError(b[10:11], "datetime separator is expected to be T or a space")
|
||||
}
|
||||
|
||||
t, rest, err := parseLocalTime(b[11:])
|
||||
|
|
@ -195,7 +197,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
// check if b matches to have expected format HH:MM:SS[.NNNNNN]
|
||||
const localTimeByteLen = 8
|
||||
if len(b) < localTimeByteLen {
|
||||
return t, nil, newDecodeError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
||||
return t, nil, unstable.NewParserError(b, "times are expected to have the format HH:MM:SS[.NNNNNN]")
|
||||
}
|
||||
|
||||
var err error
|
||||
|
|
@ -206,10 +208,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
}
|
||||
|
||||
if t.Hour > 23 {
|
||||
return t, nil, newDecodeError(b[0:2], "hour cannot be greater 23")
|
||||
return t, nil, unstable.NewParserError(b[0:2], "hour cannot be greater 23")
|
||||
}
|
||||
if b[2] != ':' {
|
||||
return t, nil, newDecodeError(b[2:3], "expecting colon between hours and minutes")
|
||||
return t, nil, unstable.NewParserError(b[2:3], "expecting colon between hours and minutes")
|
||||
}
|
||||
|
||||
t.Minute, err = parseDecimalDigits(b[3:5])
|
||||
|
|
@ -217,10 +219,10 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
return t, nil, err
|
||||
}
|
||||
if t.Minute > 59 {
|
||||
return t, nil, newDecodeError(b[3:5], "minutes cannot be greater 59")
|
||||
return t, nil, unstable.NewParserError(b[3:5], "minutes cannot be greater 59")
|
||||
}
|
||||
if b[5] != ':' {
|
||||
return t, nil, newDecodeError(b[5:6], "expecting colon between minutes and seconds")
|
||||
return t, nil, unstable.NewParserError(b[5:6], "expecting colon between minutes and seconds")
|
||||
}
|
||||
|
||||
t.Second, err = parseDecimalDigits(b[6:8])
|
||||
|
|
@ -229,7 +231,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
}
|
||||
|
||||
if t.Second > 60 {
|
||||
return t, nil, newDecodeError(b[6:8], "seconds cannot be greater 60")
|
||||
return t, nil, unstable.NewParserError(b[6:8], "seconds cannot be greater 60")
|
||||
}
|
||||
|
||||
b = b[8:]
|
||||
|
|
@ -242,7 +244,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
for i, c := range b[1:] {
|
||||
if !isDigit(c) {
|
||||
if i == 0 {
|
||||
return t, nil, newDecodeError(b[0:1], "need at least one digit after fraction point")
|
||||
return t, nil, unstable.NewParserError(b[0:1], "need at least one digit after fraction point")
|
||||
}
|
||||
break
|
||||
}
|
||||
|
|
@ -266,7 +268,7 @@ func parseLocalTime(b []byte) (LocalTime, []byte, error) {
|
|||
}
|
||||
|
||||
if precision == 0 {
|
||||
return t, nil, newDecodeError(b[:1], "nanoseconds need at least one digit")
|
||||
return t, nil, unstable.NewParserError(b[:1], "nanoseconds need at least one digit")
|
||||
}
|
||||
|
||||
t.Nanosecond = frac * nspow[precision]
|
||||
|
|
@ -289,24 +291,24 @@ func parseFloat(b []byte) (float64, error) {
|
|||
}
|
||||
|
||||
if cleaned[0] == '.' {
|
||||
return 0, newDecodeError(b, "float cannot start with a dot")
|
||||
return 0, unstable.NewParserError(b, "float cannot start with a dot")
|
||||
}
|
||||
|
||||
if cleaned[len(cleaned)-1] == '.' {
|
||||
return 0, newDecodeError(b, "float cannot end with a dot")
|
||||
return 0, unstable.NewParserError(b, "float cannot end with a dot")
|
||||
}
|
||||
|
||||
dotAlreadySeen := false
|
||||
for i, c := range cleaned {
|
||||
if c == '.' {
|
||||
if dotAlreadySeen {
|
||||
return 0, newDecodeError(b[i:i+1], "float can have at most one decimal point")
|
||||
return 0, unstable.NewParserError(b[i:i+1], "float can have at most one decimal point")
|
||||
}
|
||||
if !isDigit(cleaned[i-1]) {
|
||||
return 0, newDecodeError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
||||
return 0, unstable.NewParserError(b[i-1:i+1], "float decimal point must be preceded by a digit")
|
||||
}
|
||||
if !isDigit(cleaned[i+1]) {
|
||||
return 0, newDecodeError(b[i:i+2], "float decimal point must be followed by a digit")
|
||||
return 0, unstable.NewParserError(b[i:i+2], "float decimal point must be followed by a digit")
|
||||
}
|
||||
dotAlreadySeen = true
|
||||
}
|
||||
|
|
@ -317,12 +319,12 @@ func parseFloat(b []byte) (float64, error) {
|
|||
start = 1
|
||||
}
|
||||
if cleaned[start] == '0' && isDigit(cleaned[start+1]) {
|
||||
return 0, newDecodeError(b, "float integer part cannot have leading zeroes")
|
||||
return 0, unstable.NewParserError(b, "float integer part cannot have leading zeroes")
|
||||
}
|
||||
|
||||
f, err := strconv.ParseFloat(string(cleaned), 64)
|
||||
if err != nil {
|
||||
return 0, newDecodeError(b, "unable to parse float: %w", err)
|
||||
return 0, unstable.NewParserError(b, "unable to parse float: %w", err)
|
||||
}
|
||||
|
||||
return f, nil
|
||||
|
|
@ -336,7 +338,7 @@ func parseIntHex(b []byte) (int64, error) {
|
|||
|
||||
i, err := strconv.ParseInt(string(cleaned), 16, 64)
|
||||
if err != nil {
|
||||
return 0, newDecodeError(b, "couldn't parse hexadecimal number: %w", err)
|
||||
return 0, unstable.NewParserError(b, "couldn't parse hexadecimal number: %w", err)
|
||||
}
|
||||
|
||||
return i, nil
|
||||
|
|
@ -350,7 +352,7 @@ func parseIntOct(b []byte) (int64, error) {
|
|||
|
||||
i, err := strconv.ParseInt(string(cleaned), 8, 64)
|
||||
if err != nil {
|
||||
return 0, newDecodeError(b, "couldn't parse octal number: %w", err)
|
||||
return 0, unstable.NewParserError(b, "couldn't parse octal number: %w", err)
|
||||
}
|
||||
|
||||
return i, nil
|
||||
|
|
@ -364,7 +366,7 @@ func parseIntBin(b []byte) (int64, error) {
|
|||
|
||||
i, err := strconv.ParseInt(string(cleaned), 2, 64)
|
||||
if err != nil {
|
||||
return 0, newDecodeError(b, "couldn't parse binary number: %w", err)
|
||||
return 0, unstable.NewParserError(b, "couldn't parse binary number: %w", err)
|
||||
}
|
||||
|
||||
return i, nil
|
||||
|
|
@ -387,12 +389,12 @@ func parseIntDec(b []byte) (int64, error) {
|
|||
}
|
||||
|
||||
if len(cleaned) > startIdx+1 && cleaned[startIdx] == '0' {
|
||||
return 0, newDecodeError(b, "leading zero not allowed on decimal number")
|
||||
return 0, unstable.NewParserError(b, "leading zero not allowed on decimal number")
|
||||
}
|
||||
|
||||
i, err := strconv.ParseInt(string(cleaned), 10, 64)
|
||||
if err != nil {
|
||||
return 0, newDecodeError(b, "couldn't parse decimal number: %w", err)
|
||||
return 0, unstable.NewParserError(b, "couldn't parse decimal number: %w", err)
|
||||
}
|
||||
|
||||
return i, nil
|
||||
|
|
@ -409,11 +411,11 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||
}
|
||||
|
||||
if b[start] == '_' {
|
||||
return nil, newDecodeError(b[start:start+1], "number cannot start with underscore")
|
||||
return nil, unstable.NewParserError(b[start:start+1], "number cannot start with underscore")
|
||||
}
|
||||
|
||||
if b[len(b)-1] == '_' {
|
||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
||||
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||
}
|
||||
|
||||
// fast path
|
||||
|
|
@ -435,7 +437,7 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||
c := b[i]
|
||||
if c == '_' {
|
||||
if !before {
|
||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||
}
|
||||
before = false
|
||||
} else {
|
||||
|
|
@ -449,11 +451,11 @@ func checkAndRemoveUnderscoresIntegers(b []byte) ([]byte, error) {
|
|||
|
||||
func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
||||
if b[0] == '_' {
|
||||
return nil, newDecodeError(b[0:1], "number cannot start with underscore")
|
||||
return nil, unstable.NewParserError(b[0:1], "number cannot start with underscore")
|
||||
}
|
||||
|
||||
if b[len(b)-1] == '_' {
|
||||
return nil, newDecodeError(b[len(b)-1:], "number cannot end with underscore")
|
||||
return nil, unstable.NewParserError(b[len(b)-1:], "number cannot end with underscore")
|
||||
}
|
||||
|
||||
// fast path
|
||||
|
|
@ -476,10 +478,10 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||
switch c {
|
||||
case '_':
|
||||
if !before {
|
||||
return nil, newDecodeError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||
return nil, unstable.NewParserError(b[i-1:i+1], "number must have at least one digit between underscores")
|
||||
}
|
||||
if i < len(b)-1 && (b[i+1] == 'e' || b[i+1] == 'E') {
|
||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore before exponent")
|
||||
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore before exponent")
|
||||
}
|
||||
before = false
|
||||
case '+', '-':
|
||||
|
|
@ -488,15 +490,15 @@ func checkAndRemoveUnderscoresFloats(b []byte) ([]byte, error) {
|
|||
before = false
|
||||
case 'e', 'E':
|
||||
if i < len(b)-1 && b[i+1] == '_' {
|
||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after exponent")
|
||||
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after exponent")
|
||||
}
|
||||
cleaned = append(cleaned, c)
|
||||
case '.':
|
||||
if i < len(b)-1 && b[i+1] == '_' {
|
||||
return nil, newDecodeError(b[i+1:i+2], "cannot have underscore after decimal point")
|
||||
return nil, unstable.NewParserError(b[i+1:i+2], "cannot have underscore after decimal point")
|
||||
}
|
||||
if i > 0 && b[i-1] == '_' {
|
||||
return nil, newDecodeError(b[i-1:i], "cannot have underscore before decimal point")
|
||||
return nil, unstable.NewParserError(b[i-1:i], "cannot have underscore before decimal point")
|
||||
}
|
||||
cleaned = append(cleaned, c)
|
||||
default:
|
||||
|
|
@ -542,3 +544,7 @@ func daysIn(m int, year int) int {
|
|||
func isLeap(year int) bool {
|
||||
return year%4 == 0 && (year%100 != 0 || year%400 == 0)
|
||||
}
|
||||
|
||||
func isDigit(r byte) bool {
|
||||
return r >= '0' && r <= '9'
|
||||
}
|
||||
|
|
|
|||
|
|
@ -6,6 +6,7 @@ import (
|
|||
"strings"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
// DecodeError represents an error encountered during the parsing or decoding
|
||||
|
|
@ -55,25 +56,6 @@ func (s *StrictMissingError) String() string {
|
|||
|
||||
type Key []string
|
||||
|
||||
// internal version of DecodeError that is used as the base to create a
|
||||
// DecodeError with full context.
|
||||
type decodeError struct {
|
||||
highlight []byte
|
||||
message string
|
||||
key Key // optional
|
||||
}
|
||||
|
||||
func (de *decodeError) Error() string {
|
||||
return de.message
|
||||
}
|
||||
|
||||
func newDecodeError(highlight []byte, format string, args ...interface{}) error {
|
||||
return &decodeError{
|
||||
highlight: highlight,
|
||||
message: fmt.Errorf(format, args...).Error(),
|
||||
}
|
||||
}
|
||||
|
||||
// Error returns the error message contained in the DecodeError.
|
||||
func (e *DecodeError) Error() string {
|
||||
return "toml: " + e.message
|
||||
|
|
@ -105,12 +87,12 @@ func (e *DecodeError) Key() Key {
|
|||
// highlight can be freely deallocated.
|
||||
//
|
||||
//nolint:funlen
|
||||
func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
||||
offset := danger.SubsliceOffset(document, de.highlight)
|
||||
func wrapDecodeError(document []byte, de *unstable.ParserError) *DecodeError {
|
||||
offset := danger.SubsliceOffset(document, de.Highlight)
|
||||
|
||||
errMessage := de.Error()
|
||||
errLine, errColumn := positionAtEnd(document[:offset])
|
||||
before, after := linesOfContext(document, de.highlight, offset, 3)
|
||||
before, after := linesOfContext(document, de.Highlight, offset, 3)
|
||||
|
||||
var buf strings.Builder
|
||||
|
||||
|
|
@ -140,7 +122,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||
buf.Write(before[0])
|
||||
}
|
||||
|
||||
buf.Write(de.highlight)
|
||||
buf.Write(de.Highlight)
|
||||
|
||||
if len(after) > 0 {
|
||||
buf.Write(after[0])
|
||||
|
|
@ -158,7 +140,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||
buf.WriteString(strings.Repeat(" ", len(before[0])))
|
||||
}
|
||||
|
||||
buf.WriteString(strings.Repeat("~", len(de.highlight)))
|
||||
buf.WriteString(strings.Repeat("~", len(de.Highlight)))
|
||||
|
||||
if len(errMessage) > 0 {
|
||||
buf.WriteString(" ")
|
||||
|
|
@ -183,7 +165,7 @@ func wrapDecodeError(document []byte, de *decodeError) *DecodeError {
|
|||
message: errMessage,
|
||||
line: errLine,
|
||||
column: errColumn,
|
||||
key: de.key,
|
||||
key: de.Key,
|
||||
human: buf.String(),
|
||||
}
|
||||
}
|
||||
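The hunks above rewire wrapDecodeError to consume an unstable.ParserError (with its exported Highlight, Message and Key fields) while still producing the package's public DecodeError, carrying the computed line/column and the "~" underline. A minimal sketch of how a caller can inspect that error; the TOML input is illustrative and the exact message text may differ:

	package main

	import (
		"errors"
		"fmt"

		toml "github.com/pelletier/go-toml/v2"
	)

	func main() {
		var v struct{ A int }
		err := toml.Unmarshal([]byte(`A = "not a number"`), &v)

		var derr *toml.DecodeError
		if errors.As(err, &derr) {
			row, col := derr.Position()
			fmt.Println(row, col)      // position of the highlighted range
			fmt.Println(derr.String()) // multi-line context with the "~" underline built above
		}
	}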
|
|
|
|||
|
|
@ -1,51 +0,0 @@
|
|||
package ast
|
||||
|
||||
type Reference int
|
||||
|
||||
const InvalidReference Reference = -1
|
||||
|
||||
func (r Reference) Valid() bool {
|
||||
return r != InvalidReference
|
||||
}
|
||||
|
||||
type Builder struct {
|
||||
tree Root
|
||||
lastIdx int
|
||||
}
|
||||
|
||||
func (b *Builder) Tree() *Root {
|
||||
return &b.tree
|
||||
}
|
||||
|
||||
func (b *Builder) NodeAt(ref Reference) *Node {
|
||||
return b.tree.at(ref)
|
||||
}
|
||||
|
||||
func (b *Builder) Reset() {
|
||||
b.tree.nodes = b.tree.nodes[:0]
|
||||
b.lastIdx = 0
|
||||
}
|
||||
|
||||
func (b *Builder) Push(n Node) Reference {
|
||||
b.lastIdx = len(b.tree.nodes)
|
||||
b.tree.nodes = append(b.tree.nodes, n)
|
||||
return Reference(b.lastIdx)
|
||||
}
|
||||
|
||||
func (b *Builder) PushAndChain(n Node) Reference {
|
||||
newIdx := len(b.tree.nodes)
|
||||
b.tree.nodes = append(b.tree.nodes, n)
|
||||
if b.lastIdx >= 0 {
|
||||
b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
|
||||
}
|
||||
b.lastIdx = newIdx
|
||||
return Reference(b.lastIdx)
|
||||
}
|
||||
|
||||
func (b *Builder) AttachChild(parent Reference, child Reference) {
|
||||
b.tree.nodes[parent].child = int(child) - int(parent)
|
||||
}
|
||||
|
||||
func (b *Builder) Chain(from Reference, to Reference) {
|
||||
b.tree.nodes[from].next = int(to) - int(from)
|
||||
}
|
||||
42
vendor/github.com/pelletier/go-toml/v2/internal/characters/ascii.go
generated
vendored
Normal file
|
|
@ -0,0 +1,42 @@
|
|||
package characters
|
||||
|
||||
var invalidAsciiTable = [256]bool{
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
// 0x09 TAB
|
||||
// 0x0A LF
|
||||
0x0B: true,
|
||||
0x0C: true,
|
||||
// 0x0D CR
|
||||
0x0E: true,
|
||||
0x0F: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1A: true,
|
||||
0x1B: true,
|
||||
0x1C: true,
|
||||
0x1D: true,
|
||||
0x1E: true,
|
||||
0x1F: true,
|
||||
// 0x20 - 0x7E Printable ASCII characters
|
||||
0x7F: true,
|
||||
}
|
||||
|
||||
func InvalidAscii(b byte) bool {
|
||||
return invalidAsciiTable[b]
|
||||
}
|
||||
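This new file extracts the control-character lookup table into the internal characters package: every ASCII control byte except tab (0x09), LF (0x0A) and CR (0x0D), plus DEL (0x7F), is treated as invalid inside TOML strings. The package is internal, so it is not importable by users; the predicate behaves roughly as this sketch illustrates:

	characters.InvalidAscii(0x07) // true: BEL must be escaped
	characters.InvalidAscii('\t') // false: tab may appear unescaped
	characters.InvalidAscii('A')  // false: printable ASCII (0x20-0x7E) is allowed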
|
|
@ -1,4 +1,4 @@
|
|||
package toml
|
||||
package characters
|
||||
|
||||
import (
|
||||
"unicode/utf8"
|
||||
|
|
@ -32,7 +32,7 @@ func (u utf8Err) Zero() bool {
|
|||
// 0x9 => tab, ok
|
||||
// 0xA - 0x1F => invalid
|
||||
// 0x7F => invalid
|
||||
func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
||||
func Utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
||||
// Fast path. Check for and skip 8 bytes of ASCII characters per iteration.
|
||||
offset := 0
|
||||
for len(p) >= 8 {
|
||||
|
|
@ -48,7 +48,7 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||
}
|
||||
|
||||
for i, b := range p[:8] {
|
||||
if invalidAscii(b) {
|
||||
if InvalidAscii(b) {
|
||||
err.Index = offset + i
|
||||
err.Size = 1
|
||||
return
|
||||
|
|
@ -62,7 +62,7 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||
for i := 0; i < n; {
|
||||
pi := p[i]
|
||||
if pi < utf8.RuneSelf {
|
||||
if invalidAscii(pi) {
|
||||
if InvalidAscii(pi) {
|
||||
err.Index = offset + i
|
||||
err.Size = 1
|
||||
return
|
||||
|
|
@ -106,11 +106,11 @@ func utf8TomlValidAlreadyEscaped(p []byte) (err utf8Err) {
|
|||
}
|
||||
|
||||
// Return the size of the next rune if valid, 0 otherwise.
|
||||
func utf8ValidNext(p []byte) int {
|
||||
func Utf8ValidNext(p []byte) int {
|
||||
c := p[0]
|
||||
|
||||
if c < utf8.RuneSelf {
|
||||
if invalidAscii(c) {
|
||||
if InvalidAscii(c) {
|
||||
return 0
|
||||
}
|
||||
return 1
|
||||
|
|
@ -140,47 +140,6 @@ func utf8ValidNext(p []byte) int {
|
|||
return size
|
||||
}
|
||||
|
||||
var invalidAsciiTable = [256]bool{
|
||||
0x00: true,
|
||||
0x01: true,
|
||||
0x02: true,
|
||||
0x03: true,
|
||||
0x04: true,
|
||||
0x05: true,
|
||||
0x06: true,
|
||||
0x07: true,
|
||||
0x08: true,
|
||||
// 0x09 TAB
|
||||
// 0x0A LF
|
||||
0x0B: true,
|
||||
0x0C: true,
|
||||
// 0x0D CR
|
||||
0x0E: true,
|
||||
0x0F: true,
|
||||
0x10: true,
|
||||
0x11: true,
|
||||
0x12: true,
|
||||
0x13: true,
|
||||
0x14: true,
|
||||
0x15: true,
|
||||
0x16: true,
|
||||
0x17: true,
|
||||
0x18: true,
|
||||
0x19: true,
|
||||
0x1A: true,
|
||||
0x1B: true,
|
||||
0x1C: true,
|
||||
0x1D: true,
|
||||
0x1E: true,
|
||||
0x1F: true,
|
||||
// 0x20 - 0x7E Printable ASCII characters
|
||||
0x7F: true,
|
||||
}
|
||||
|
||||
func invalidAscii(b byte) bool {
|
||||
return invalidAsciiTable[b]
|
||||
}
|
||||
|
||||
// acceptRange gives the range of valid values for the second byte in a UTF-8
|
||||
// sequence.
|
||||
type acceptRange struct {
|
||||
|
|
@ -1,8 +1,6 @@
|
|||
package tracker
|
||||
|
||||
import (
|
||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
||||
)
|
||||
import "github.com/pelletier/go-toml/v2/unstable"
|
||||
|
||||
// KeyTracker is a tracker that keeps track of the current Key as the AST is
|
||||
// walked.
|
||||
|
|
@ -11,19 +9,19 @@ type KeyTracker struct {
|
|||
}
|
||||
|
||||
// UpdateTable sets the state of the tracker with the AST table node.
|
||||
func (t *KeyTracker) UpdateTable(node *ast.Node) {
|
||||
func (t *KeyTracker) UpdateTable(node *unstable.Node) {
|
||||
t.reset()
|
||||
t.Push(node)
|
||||
}
|
||||
|
||||
// UpdateArrayTable sets the state of the tracker with the AST array table node.
|
||||
func (t *KeyTracker) UpdateArrayTable(node *ast.Node) {
|
||||
func (t *KeyTracker) UpdateArrayTable(node *unstable.Node) {
|
||||
t.reset()
|
||||
t.Push(node)
|
||||
}
|
||||
|
||||
// Push the given key on the stack.
|
||||
func (t *KeyTracker) Push(node *ast.Node) {
|
||||
func (t *KeyTracker) Push(node *unstable.Node) {
|
||||
it := node.Key()
|
||||
for it.Next() {
|
||||
t.k = append(t.k, string(it.Node().Data))
|
||||
|
|
@ -31,7 +29,7 @@ func (t *KeyTracker) Push(node *ast.Node) {
|
|||
}
|
||||
|
||||
// Pop key from stack.
|
||||
func (t *KeyTracker) Pop(node *ast.Node) {
|
||||
func (t *KeyTracker) Pop(node *unstable.Node) {
|
||||
it := node.Key()
|
||||
for it.Next() {
|
||||
t.k = t.k[:len(t.k)-1]
|
||||
|
|
|
|||
|
|
@ -5,7 +5,7 @@ import (
|
|||
"fmt"
|
||||
"sync"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
type keyKind uint8
|
||||
|
|
@ -150,23 +150,23 @@ func (s *SeenTracker) setExplicitFlag(parentIdx int) {
|
|||
// CheckExpression takes a top-level node and checks that it does not contain
|
||||
// keys that have been seen in previous calls, and validates that types are
|
||||
// consistent.
|
||||
func (s *SeenTracker) CheckExpression(node *ast.Node) error {
|
||||
func (s *SeenTracker) CheckExpression(node *unstable.Node) error {
|
||||
if s.entries == nil {
|
||||
s.reset()
|
||||
}
|
||||
switch node.Kind {
|
||||
case ast.KeyValue:
|
||||
case unstable.KeyValue:
|
||||
return s.checkKeyValue(node)
|
||||
case ast.Table:
|
||||
case unstable.Table:
|
||||
return s.checkTable(node)
|
||||
case ast.ArrayTable:
|
||||
case unstable.ArrayTable:
|
||||
return s.checkArrayTable(node)
|
||||
default:
|
||||
panic(fmt.Errorf("this should not be a top level node type: %s", node.Kind))
|
||||
}
|
||||
}
|
||||
|
||||
func (s *SeenTracker) checkTable(node *ast.Node) error {
|
||||
func (s *SeenTracker) checkTable(node *unstable.Node) error {
|
||||
if s.currentIdx >= 0 {
|
||||
s.setExplicitFlag(s.currentIdx)
|
||||
}
|
||||
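CheckExpression (now operating on unstable.Node) is what rejects documents that redefine a table or key that was already seen, and keeps value kinds consistent. A hedged sketch of input it refuses; the exact error wording is not guaranteed:

	package main

	import (
		"fmt"

		toml "github.com/pelletier/go-toml/v2"
	)

	func main() {
		doc := []byte("[server]\nport = 80\n\n[server]\nhost = 'a'\n") // [server] defined twice
		var v map[string]interface{}
		err := toml.Unmarshal(doc, &v)
		fmt.Println(err) // non-nil: duplicate table definitions are invalid TOML
	}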
|
|
@ -219,7 +219,7 @@ func (s *SeenTracker) checkTable(node *ast.Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
||||
func (s *SeenTracker) checkArrayTable(node *unstable.Node) error {
|
||||
if s.currentIdx >= 0 {
|
||||
s.setExplicitFlag(s.currentIdx)
|
||||
}
|
||||
|
|
@ -267,7 +267,7 @@ func (s *SeenTracker) checkArrayTable(node *ast.Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
|
||||
func (s *SeenTracker) checkKeyValue(node *unstable.Node) error {
|
||||
parentIdx := s.currentIdx
|
||||
it := node.Key()
|
||||
|
||||
|
|
@ -297,26 +297,26 @@ func (s *SeenTracker) checkKeyValue(node *ast.Node) error {
|
|||
value := node.Value()
|
||||
|
||||
switch value.Kind {
|
||||
case ast.InlineTable:
|
||||
case unstable.InlineTable:
|
||||
return s.checkInlineTable(value)
|
||||
case ast.Array:
|
||||
case unstable.Array:
|
||||
return s.checkArray(value)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (s *SeenTracker) checkArray(node *ast.Node) error {
|
||||
func (s *SeenTracker) checkArray(node *unstable.Node) error {
|
||||
it := node.Children()
|
||||
for it.Next() {
|
||||
n := it.Node()
|
||||
switch n.Kind {
|
||||
case ast.InlineTable:
|
||||
case unstable.InlineTable:
|
||||
err := s.checkInlineTable(n)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
case ast.Array:
|
||||
case unstable.Array:
|
||||
err := s.checkArray(n)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -326,7 +326,7 @@ func (s *SeenTracker) checkArray(node *ast.Node) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (s *SeenTracker) checkInlineTable(node *ast.Node) error {
|
||||
func (s *SeenTracker) checkInlineTable(node *unstable.Node) error {
|
||||
if pool.New == nil {
|
||||
pool.New = func() interface{} {
|
||||
return &SeenTracker{}
|
||||
|
|
|
|||
|
|
@ -4,6 +4,8 @@ import (
|
|||
"fmt"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
// LocalDate represents a calendar day in no specific timezone.
|
||||
|
|
@ -75,7 +77,7 @@ func (d LocalTime) MarshalText() ([]byte, error) {
|
|||
func (d *LocalTime) UnmarshalText(b []byte) error {
|
||||
res, left, err := parseLocalTime(b)
|
||||
if err == nil && len(left) != 0 {
|
||||
err = newDecodeError(left, "extra characters")
|
||||
err = unstable.NewParserError(left, "extra characters")
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -109,7 +111,7 @@ func (d LocalDateTime) MarshalText() ([]byte, error) {
|
|||
func (d *LocalDateTime) UnmarshalText(data []byte) error {
|
||||
res, left, err := parseLocalDateTime(data)
|
||||
if err == nil && len(left) != 0 {
|
||||
err = newDecodeError(left, "extra characters")
|
||||
err = unstable.NewParserError(left, "extra characters")
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
|
|||
|
|
@ -12,6 +12,8 @@ import (
|
|||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||
)
|
||||
|
||||
// Marshal serializes a Go value as a TOML document.
|
||||
|
|
@ -271,7 +273,7 @@ func (enc *Encoder) encode(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, e
|
|||
return enc.encodeMap(b, ctx, v)
|
||||
case reflect.Struct:
|
||||
return enc.encodeStruct(b, ctx, v)
|
||||
case reflect.Slice:
|
||||
case reflect.Slice, reflect.Array:
|
||||
return enc.encodeSlice(b, ctx, v)
|
||||
case reflect.Interface:
|
||||
if v.IsNil() {
|
||||
|
|
@ -355,9 +357,9 @@ func (enc *Encoder) encodeKv(b []byte, ctx encoderCtx, options valueOptions, v r
|
|||
|
||||
if !ctx.inline {
|
||||
b = enc.encodeComment(ctx.indent, options.comment, b)
|
||||
b = enc.indent(ctx.indent, b)
|
||||
}
|
||||
|
||||
b = enc.indent(ctx.indent, b)
|
||||
b = enc.encodeKey(b, ctx.key)
|
||||
b = append(b, " = "...)
|
||||
|
||||
|
|
@ -437,7 +439,7 @@ func (enc *Encoder) encodeString(b []byte, v string, options valueOptions) []byt
|
|||
func needsQuoting(v string) bool {
|
||||
// TODO: vectorize
|
||||
for _, b := range []byte(v) {
|
||||
if b == '\'' || b == '\r' || b == '\n' || invalidAscii(b) {
|
||||
if b == '\'' || b == '\r' || b == '\n' || characters.InvalidAscii(b) {
|
||||
return true
|
||||
}
|
||||
}
|
||||
|
|
@ -575,11 +577,23 @@ func (enc *Encoder) encodeKey(b []byte, k string) []byte {
|
|||
}
|
||||
}
|
||||
|
||||
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||
if v.Type().Key().Kind() != reflect.String {
|
||||
return nil, fmt.Errorf("toml: type %s is not supported as a map key", v.Type().Key().Kind())
|
||||
}
|
||||
func (enc *Encoder) keyToString(k reflect.Value) (string, error) {
|
||||
keyType := k.Type()
|
||||
switch {
|
||||
case keyType.Kind() == reflect.String:
|
||||
return k.String(), nil
|
||||
|
||||
case keyType.Implements(textMarshalerType):
|
||||
keyB, err := k.Interface().(encoding.TextMarshaler).MarshalText()
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("toml: error marshalling key %v from text: %w", k, err)
|
||||
}
|
||||
return string(keyB), nil
|
||||
}
|
||||
return "", fmt.Errorf("toml: type %s is not supported as a map key", keyType.Kind())
|
||||
}
|
||||
|
||||
func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte, error) {
|
||||
var (
|
||||
t table
|
||||
emptyValueOptions valueOptions
|
||||
|
|
@ -587,13 +601,17 @@ func (enc *Encoder) encodeMap(b []byte, ctx encoderCtx, v reflect.Value) ([]byte
|
|||
|
||||
iter := v.MapRange()
|
||||
for iter.Next() {
|
||||
k := iter.Key().String()
|
||||
v := iter.Value()
|
||||
|
||||
if isNil(v) {
|
||||
continue
|
||||
}
|
||||
|
||||
k, err := enc.keyToString(iter.Key())
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if willConvertToTableOrArrayTable(ctx, v) {
|
||||
t.pushTable(k, v, emptyValueOptions)
|
||||
} else {
|
||||
|
|
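With keyToString in place, Marshal accepts map keys that implement encoding.TextMarshaler in addition to plain strings. A sketch using a hypothetical ID key type; the exact quoting style of the output is not guaranteed:

	package main

	import (
		"fmt"

		toml "github.com/pelletier/go-toml/v2"
	)

	type ID int

	func (i ID) MarshalText() ([]byte, error) { return []byte(fmt.Sprintf("id-%d", i)), nil }

	func main() {
		b, err := toml.Marshal(map[ID]string{1: "first"})
		fmt.Println(string(b), err) // expected along the lines of: id-1 = 'first'
	}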
@ -912,7 +930,7 @@ func willConvertToTableOrArrayTable(ctx encoderCtx, v reflect.Value) bool {
|
|||
return willConvertToTableOrArrayTable(ctx, v.Elem())
|
||||
}
|
||||
|
||||
if t.Kind() == reflect.Slice {
|
||||
if t.Kind() == reflect.Slice || t.Kind() == reflect.Array {
|
||||
if v.Len() == 0 {
|
||||
// An empty slice should be a kv = [].
|
||||
return false
|
||||
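The encoder now treats fixed-size Go arrays the same way as slices, both in the encode dispatch above and in willConvertToTableOrArrayTable here. A small sketch; the struct and tag are illustrative:

	package main

	import (
		"fmt"

		toml "github.com/pelletier/go-toml/v2"
	)

	func main() {
		cfg := struct {
			Ports [2]int `toml:"ports"`
		}{Ports: [2]int{80, 443}}
		b, err := toml.Marshal(cfg)
		fmt.Println(string(b), err) // expected along the lines of: ports = [80, 443]
	}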
|
|
|
|||
|
|
@ -1,9 +1,9 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
type strict struct {
|
||||
|
|
@ -12,10 +12,10 @@ type strict struct {
|
|||
// Tracks the current key being processed.
|
||||
key tracker.KeyTracker
|
||||
|
||||
missing []decodeError
|
||||
missing []unstable.ParserError
|
||||
}
|
||||
|
||||
func (s *strict) EnterTable(node *ast.Node) {
|
||||
func (s *strict) EnterTable(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
|
@ -23,7 +23,7 @@ func (s *strict) EnterTable(node *ast.Node) {
|
|||
s.key.UpdateTable(node)
|
||||
}
|
||||
|
||||
func (s *strict) EnterArrayTable(node *ast.Node) {
|
||||
func (s *strict) EnterArrayTable(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
|
@ -31,7 +31,7 @@ func (s *strict) EnterArrayTable(node *ast.Node) {
|
|||
s.key.UpdateArrayTable(node)
|
||||
}
|
||||
|
||||
func (s *strict) EnterKeyValue(node *ast.Node) {
|
||||
func (s *strict) EnterKeyValue(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
|
@ -39,7 +39,7 @@ func (s *strict) EnterKeyValue(node *ast.Node) {
|
|||
s.key.Push(node)
|
||||
}
|
||||
|
||||
func (s *strict) ExitKeyValue(node *ast.Node) {
|
||||
func (s *strict) ExitKeyValue(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
|
@ -47,27 +47,27 @@ func (s *strict) ExitKeyValue(node *ast.Node) {
|
|||
s.key.Pop(node)
|
||||
}
|
||||
|
||||
func (s *strict) MissingTable(node *ast.Node) {
|
||||
func (s *strict) MissingTable(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
||||
s.missing = append(s.missing, decodeError{
|
||||
highlight: keyLocation(node),
|
||||
message: "missing table",
|
||||
key: s.key.Key(),
|
||||
s.missing = append(s.missing, unstable.ParserError{
|
||||
Highlight: keyLocation(node),
|
||||
Message: "missing table",
|
||||
Key: s.key.Key(),
|
||||
})
|
||||
}
|
||||
|
||||
func (s *strict) MissingField(node *ast.Node) {
|
||||
func (s *strict) MissingField(node *unstable.Node) {
|
||||
if !s.Enabled {
|
||||
return
|
||||
}
|
||||
|
||||
s.missing = append(s.missing, decodeError{
|
||||
highlight: keyLocation(node),
|
||||
message: "missing field",
|
||||
key: s.key.Key(),
|
||||
s.missing = append(s.missing, unstable.ParserError{
|
||||
Highlight: keyLocation(node),
|
||||
Message: "missing field",
|
||||
Key: s.key.Key(),
|
||||
})
|
||||
}
|
||||
|
||||
|
|
@ -88,7 +88,7 @@ func (s *strict) Error(doc []byte) error {
|
|||
return err
|
||||
}
|
||||
|
||||
func keyLocation(node *ast.Node) []byte {
|
||||
func keyLocation(node *unstable.Node) []byte {
|
||||
k := node.Key()
|
||||
|
||||
hasOne := k.Next()
|
||||
|
|
|
|||
|
|
@ -6,9 +6,9 @@ import (
|
|||
"time"
|
||||
)
|
||||
|
||||
var timeType = reflect.TypeOf(time.Time{})
|
||||
var textMarshalerType = reflect.TypeOf(new(encoding.TextMarshaler)).Elem()
|
||||
var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
|
||||
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})
|
||||
var sliceInterfaceType = reflect.TypeOf([]interface{}{})
|
||||
var timeType = reflect.TypeOf((*time.Time)(nil)).Elem()
|
||||
var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||
var textUnmarshalerType = reflect.TypeOf((*encoding.TextUnmarshaler)(nil)).Elem()
|
||||
var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}(nil))
|
||||
var sliceInterfaceType = reflect.TypeOf([]interface{}(nil))
|
||||
var stringType = reflect.TypeOf("")
|
||||
|
|
|
|||
|
|
@ -12,16 +12,16 @@ import (
|
|||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||
"github.com/pelletier/go-toml/v2/internal/tracker"
|
||||
"github.com/pelletier/go-toml/v2/unstable"
|
||||
)
|
||||
|
||||
// Unmarshal deserializes a TOML document into a Go value.
|
||||
//
|
||||
// It is a shortcut for Decoder.Decode() with the default options.
|
||||
func Unmarshal(data []byte, v interface{}) error {
|
||||
p := parser{}
|
||||
p := unstable.Parser{}
|
||||
p.Reset(data)
|
||||
d := decoder{p: &p}
|
||||
|
||||
|
|
@ -60,7 +60,7 @@ func (d *Decoder) DisallowUnknownFields() *Decoder {
|
|||
// are ignored. See Decoder.DisallowUnknownFields() to change this behavior.
|
||||
//
|
||||
// When a TOML local date, time, or date-time is decoded into a time.Time, its
|
||||
// value is represented in time.Local timezone. Otherwise the approriate Local*
|
||||
// value is represented in time.Local timezone. Otherwise the appropriate Local*
|
||||
// structure is used. For time values, precision up to the nanosecond is
|
||||
// supported by truncating extra digits.
|
||||
//
|
||||
|
|
@ -101,7 +101,7 @@ func (d *Decoder) Decode(v interface{}) error {
|
|||
return fmt.Errorf("toml: %w", err)
|
||||
}
|
||||
|
||||
p := parser{}
|
||||
p := unstable.Parser{}
|
||||
p.Reset(b)
|
||||
dec := decoder{
|
||||
p: &p,
|
||||
|
|
@ -115,7 +115,7 @@ func (d *Decoder) Decode(v interface{}) error {
|
|||
|
||||
type decoder struct {
|
||||
// Which parser instance in use for this decoding session.
|
||||
p *parser
|
||||
p *unstable.Parser
|
||||
|
||||
// Flag indicating that the current expression is stashed.
|
||||
// If set to true, calling nextExpr will not actually pull a new expression
|
||||
|
|
@ -157,7 +157,7 @@ func (d *decoder) typeMismatchError(toml string, target reflect.Type) error {
|
|||
return fmt.Errorf("toml: cannot decode TOML %s into a Go value of type %s", toml, target)
|
||||
}
|
||||
|
||||
func (d *decoder) expr() *ast.Node {
|
||||
func (d *decoder) expr() *unstable.Node {
|
||||
return d.p.Expression()
|
||||
}
|
||||
|
||||
|
|
@ -208,12 +208,12 @@ func (d *decoder) FromParser(v interface{}) error {
|
|||
|
||||
err := d.fromParser(r)
|
||||
if err == nil {
|
||||
return d.strict.Error(d.p.data)
|
||||
return d.strict.Error(d.p.Data())
|
||||
}
|
||||
|
||||
var e *decodeError
|
||||
var e *unstable.ParserError
|
||||
if errors.As(err, &e) {
|
||||
return wrapDecodeError(d.p.data, e)
|
||||
return wrapDecodeError(d.p.Data(), e)
|
||||
}
|
||||
|
||||
return err
|
||||
|
|
@ -234,16 +234,16 @@ func (d *decoder) fromParser(root reflect.Value) error {
|
|||
Rules for the unmarshal code:
|
||||
|
||||
- The stack is used to keep track of which values need to be set where.
|
||||
- handle* functions <=> switch on a given ast.Kind.
|
||||
- handle* functions <=> switch on a given unstable.Kind.
|
||||
- unmarshalX* functions need to unmarshal a node of kind X.
|
||||
- An "object" is either a struct or a map.
|
||||
*/
|
||||
|
||||
func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) handleRootExpression(expr *unstable.Node, v reflect.Value) error {
|
||||
var x reflect.Value
|
||||
var err error
|
||||
|
||||
if !(d.skipUntilTable && expr.Kind == ast.KeyValue) {
|
||||
if !(d.skipUntilTable && expr.Kind == unstable.KeyValue) {
|
||||
err = d.seen.CheckExpression(expr)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -251,16 +251,16 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||
}
|
||||
|
||||
switch expr.Kind {
|
||||
case ast.KeyValue:
|
||||
case unstable.KeyValue:
|
||||
if d.skipUntilTable {
|
||||
return nil
|
||||
}
|
||||
x, err = d.handleKeyValue(expr, v)
|
||||
case ast.Table:
|
||||
case unstable.Table:
|
||||
d.skipUntilTable = false
|
||||
d.strict.EnterTable(expr)
|
||||
x, err = d.handleTable(expr.Key(), v)
|
||||
case ast.ArrayTable:
|
||||
case unstable.ArrayTable:
|
||||
d.skipUntilTable = false
|
||||
d.strict.EnterArrayTable(expr)
|
||||
x, err = d.handleArrayTable(expr.Key(), v)
|
||||
|
|
@ -269,7 +269,7 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||
}
|
||||
|
||||
if d.skipUntilTable {
|
||||
if expr.Kind == ast.Table || expr.Kind == ast.ArrayTable {
|
||||
if expr.Kind == unstable.Table || expr.Kind == unstable.ArrayTable {
|
||||
d.strict.MissingTable(expr)
|
||||
}
|
||||
} else if err == nil && x.IsValid() {
|
||||
|
|
@ -279,14 +279,14 @@ func (d *decoder) handleRootExpression(expr *ast.Node, v reflect.Value) error {
|
|||
return err
|
||||
}
|
||||
|
||||
func (d *decoder) handleArrayTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleArrayTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
if key.Next() {
|
||||
return d.handleArrayTablePart(key, v)
|
||||
}
|
||||
return d.handleKeyValues(v)
|
||||
}
|
||||
|
||||
func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleArrayTableCollectionLast(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
switch v.Kind() {
|
||||
case reflect.Interface:
|
||||
elem := v.Elem()
|
||||
|
|
@ -339,13 +339,13 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||
case reflect.Array:
|
||||
idx := d.arrayIndex(true, v)
|
||||
if idx >= v.Len() {
|
||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
||||
return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||
}
|
||||
elem := v.Index(idx)
|
||||
_, err := d.handleArrayTable(key, elem)
|
||||
return v, err
|
||||
default:
|
||||
return reflect.Value{}, fmt.Errorf("toml: cannot decode array table into a %s", v.Type())
|
||||
return reflect.Value{}, d.typeMismatchError("array table", v.Type())
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -353,7 +353,7 @@ func (d *decoder) handleArrayTableCollectionLast(key ast.Iterator, v reflect.Val
|
|||
// evaluated like a normal key, but if it returns a collection, it also needs to
|
||||
// point to the last element of the collection. Unless it is the last part of
|
||||
// the key, then it needs to create a new element at the end.
|
||||
func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleArrayTableCollection(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
if key.IsLast() {
|
||||
return d.handleArrayTableCollectionLast(key, v)
|
||||
}
|
||||
|
|
@ -390,7 +390,7 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||
case reflect.Array:
|
||||
idx := d.arrayIndex(false, v)
|
||||
if idx >= v.Len() {
|
||||
return v, fmt.Errorf("toml: cannot decode array table into %s at position %d", v.Type(), idx)
|
||||
return v, fmt.Errorf("%s at position %d", d.typeMismatchError("array table", v.Type()), idx)
|
||||
}
|
||||
elem := v.Index(idx)
|
||||
_, err := d.handleArrayTable(key, elem)
|
||||
|
|
@ -400,7 +400,7 @@ func (d *decoder) handleArrayTableCollection(key ast.Iterator, v reflect.Value)
|
|||
return d.handleArrayTable(key, v)
|
||||
}
|
||||
|
||||
func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
||||
func (d *decoder) handleKeyPart(key unstable.Iterator, v reflect.Value, nextFn handlerFn, makeFn valueMakerFn) (reflect.Value, error) {
|
||||
var rv reflect.Value
|
||||
|
||||
// First, dispatch over v to make sure it is a valid object.
|
||||
|
|
@ -417,7 +417,10 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||
vt := v.Type()
|
||||
|
||||
// Create the key for the map element. Convert to key type.
|
||||
mk := reflect.ValueOf(string(key.Node().Data)).Convert(vt.Key())
|
||||
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
|
||||
// If the map does not exist, create it.
|
||||
if v.IsNil() {
|
||||
|
|
@ -518,7 +521,7 @@ func (d *decoder) handleKeyPart(key ast.Iterator, v reflect.Value, nextFn handle
|
|||
// HandleArrayTablePart navigates the Go structure v using the key v. It is
|
||||
// only used for the prefix (non-last) parts of an array-table. When
|
||||
// encountering a collection, it should go to the last element.
|
||||
func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleArrayTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
var makeFn valueMakerFn
|
||||
if key.IsLast() {
|
||||
makeFn = makeSliceInterface
|
||||
|
|
@ -530,10 +533,10 @@ func (d *decoder) handleArrayTablePart(key ast.Iterator, v reflect.Value) (refle
|
|||
|
||||
// HandleTable returns a reference when it has checked the next expression but
|
||||
// cannot handle it.
|
||||
func (d *decoder) handleTable(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleTable(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
if v.Kind() == reflect.Slice {
|
||||
if v.Len() == 0 {
|
||||
return reflect.Value{}, newDecodeError(key.Node().Data, "cannot store a table in a slice")
|
||||
return reflect.Value{}, unstable.NewParserError(key.Node().Data, "cannot store a table in a slice")
|
||||
}
|
||||
elem := v.Index(v.Len() - 1)
|
||||
x, err := d.handleTable(key, elem)
|
||||
|
|
@ -560,7 +563,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||
var rv reflect.Value
|
||||
for d.nextExpr() {
|
||||
expr := d.expr()
|
||||
if expr.Kind != ast.KeyValue {
|
||||
if expr.Kind != unstable.KeyValue {
|
||||
// Stash the expression so that fromParser can just loop and use
|
||||
// the right handler.
|
||||
// We could just recurse ourselves here, but at least this gives a
|
||||
|
|
@ -587,7 +590,7 @@ func (d *decoder) handleKeyValues(v reflect.Value) (reflect.Value, error) {
|
|||
}
|
||||
|
||||
type (
|
||||
handlerFn func(key ast.Iterator, v reflect.Value) (reflect.Value, error)
|
||||
handlerFn func(key unstable.Iterator, v reflect.Value) (reflect.Value, error)
|
||||
valueMakerFn func() reflect.Value
|
||||
)
|
||||
|
||||
|
|
@ -599,11 +602,11 @@ func makeSliceInterface() reflect.Value {
|
|||
return reflect.MakeSlice(sliceInterfaceType, 0, 16)
|
||||
}
|
||||
|
||||
func (d *decoder) handleTablePart(key ast.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleTablePart(key unstable.Iterator, v reflect.Value) (reflect.Value, error) {
|
||||
return d.handleKeyPart(key, v, d.handleTable, makeMapStringInterface)
|
||||
}
|
||||
|
||||
func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, error) {
|
||||
func (d *decoder) tryTextUnmarshaler(node *unstable.Node, v reflect.Value) (bool, error) {
|
||||
// Special case for time, because we allow to unmarshal to it from
|
||||
// different kind of AST nodes.
|
||||
if v.Type() == timeType {
|
||||
|
|
@ -613,7 +616,7 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||
if v.CanAddr() && v.Addr().Type().Implements(textUnmarshalerType) {
|
||||
err := v.Addr().Interface().(encoding.TextUnmarshaler).UnmarshalText(node.Data)
|
||||
if err != nil {
|
||||
return false, newDecodeError(d.p.Raw(node.Raw), "%w", err)
|
||||
return false, unstable.NewParserError(d.p.Raw(node.Raw), "%w", err)
|
||||
}
|
||||
|
||||
return true, nil
|
||||
|
|
@ -622,7 +625,7 @@ func (d *decoder) tryTextUnmarshaler(node *ast.Node, v reflect.Value) (bool, err
|
|||
return false, nil
|
||||
}
|
||||
|
||||
func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) handleValue(value *unstable.Node, v reflect.Value) error {
|
||||
for v.Kind() == reflect.Ptr {
|
||||
v = initAndDereferencePointer(v)
|
||||
}
|
||||
|
|
@ -633,32 +636,32 @@ func (d *decoder) handleValue(value *ast.Node, v reflect.Value) error {
|
|||
}
|
||||
|
||||
switch value.Kind {
|
||||
case ast.String:
|
||||
case unstable.String:
|
||||
return d.unmarshalString(value, v)
|
||||
case ast.Integer:
|
||||
case unstable.Integer:
|
||||
return d.unmarshalInteger(value, v)
|
||||
case ast.Float:
|
||||
case unstable.Float:
|
||||
return d.unmarshalFloat(value, v)
|
||||
case ast.Bool:
|
||||
case unstable.Bool:
|
||||
return d.unmarshalBool(value, v)
|
||||
case ast.DateTime:
|
||||
case unstable.DateTime:
|
||||
return d.unmarshalDateTime(value, v)
|
||||
case ast.LocalDate:
|
||||
case unstable.LocalDate:
|
||||
return d.unmarshalLocalDate(value, v)
|
||||
case ast.LocalTime:
|
||||
case unstable.LocalTime:
|
||||
return d.unmarshalLocalTime(value, v)
|
||||
case ast.LocalDateTime:
|
||||
case unstable.LocalDateTime:
|
||||
return d.unmarshalLocalDateTime(value, v)
|
||||
case ast.InlineTable:
|
||||
case unstable.InlineTable:
|
||||
return d.unmarshalInlineTable(value, v)
|
||||
case ast.Array:
|
||||
case unstable.Array:
|
||||
return d.unmarshalArray(value, v)
|
||||
default:
|
||||
panic(fmt.Errorf("handleValue not implemented for %s", value.Kind))
|
||||
}
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalArray(array *unstable.Node, v reflect.Value) error {
|
||||
switch v.Kind() {
|
||||
case reflect.Slice:
|
||||
if v.IsNil() {
|
||||
|
|
@ -729,7 +732,7 @@ func (d *decoder) unmarshalArray(array *ast.Node, v reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalInlineTable(itable *unstable.Node, v reflect.Value) error {
|
||||
// Make sure v is an initialized object.
|
||||
switch v.Kind() {
|
||||
case reflect.Map:
|
||||
|
|
@ -746,7 +749,7 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||
}
|
||||
return d.unmarshalInlineTable(itable, elem)
|
||||
default:
|
||||
return newDecodeError(itable.Data, "cannot store inline table in Go type %s", v.Kind())
|
||||
return unstable.NewParserError(d.p.Raw(itable.Raw), "cannot store inline table in Go type %s", v.Kind())
|
||||
}
|
||||
|
||||
it := itable.Children()
|
||||
|
|
@ -765,7 +768,7 @@ func (d *decoder) unmarshalInlineTable(itable *ast.Node, v reflect.Value) error
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||
dt, err := parseDateTime(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -775,7 +778,7 @@ func (d *decoder) unmarshalDateTime(value *ast.Node, v reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalLocalDate(value *unstable.Node, v reflect.Value) error {
|
||||
ld, err := parseLocalDate(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -792,28 +795,28 @@ func (d *decoder) unmarshalLocalDate(value *ast.Node, v reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalLocalTime(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalLocalTime(value *unstable.Node, v reflect.Value) error {
|
||||
lt, rest, err := parseLocalTime(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(rest) > 0 {
|
||||
return newDecodeError(rest, "extra characters at the end of a local time")
|
||||
return unstable.NewParserError(rest, "extra characters at the end of a local time")
|
||||
}
|
||||
|
||||
v.Set(reflect.ValueOf(lt))
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalLocalDateTime(value *unstable.Node, v reflect.Value) error {
|
||||
ldt, rest, err := parseLocalDateTime(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(rest) > 0 {
|
||||
return newDecodeError(rest, "extra characters at the end of a local date time")
|
||||
return unstable.NewParserError(rest, "extra characters at the end of a local date time")
|
||||
}
|
||||
|
||||
if v.Type() == timeType {
|
||||
|
|
@ -828,7 +831,7 @@ func (d *decoder) unmarshalLocalDateTime(value *ast.Node, v reflect.Value) error
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalBool(value *unstable.Node, v reflect.Value) error {
|
||||
b := value.Data[0] == 't'
|
||||
|
||||
switch v.Kind() {
|
||||
|
|
@ -837,13 +840,13 @@ func (d *decoder) unmarshalBool(value *ast.Node, v reflect.Value) error {
|
|||
case reflect.Interface:
|
||||
v.Set(reflect.ValueOf(b))
|
||||
default:
|
||||
return newDecodeError(value.Data, "cannot assign boolean to a %t", b)
|
||||
return unstable.NewParserError(value.Data, "cannot assign boolean to a %t", b)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalFloat(value *unstable.Node, v reflect.Value) error {
|
||||
f, err := parseFloat(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -854,13 +857,13 @@ func (d *decoder) unmarshalFloat(value *ast.Node, v reflect.Value) error {
|
|||
v.SetFloat(f)
|
||||
case reflect.Float32:
|
||||
if f > math.MaxFloat32 {
|
||||
return newDecodeError(value.Data, "number %f does not fit in a float32", f)
|
||||
return unstable.NewParserError(value.Data, "number %f does not fit in a float32", f)
|
||||
}
|
||||
v.SetFloat(f)
|
||||
case reflect.Interface:
|
||||
v.Set(reflect.ValueOf(f))
|
||||
default:
|
||||
return newDecodeError(value.Data, "float cannot be assigned to %s", v.Kind())
|
||||
return unstable.NewParserError(value.Data, "float cannot be assigned to %s", v.Kind())
|
||||
}
|
||||
|
||||
return nil
|
||||
|
|
@ -886,7 +889,12 @@ func init() {
|
|||
}
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalInteger(value *unstable.Node, v reflect.Value) error {
|
||||
kind := v.Kind()
|
||||
if kind == reflect.Float32 || kind == reflect.Float64 {
|
||||
return d.unmarshalFloat(value, v)
|
||||
}
|
||||
|
||||
i, err := parseInteger(value.Data)
|
||||
if err != nil {
|
||||
return err
|
||||
|
|
@ -894,7 +902,7 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||
|
||||
var r reflect.Value
|
||||
|
||||
switch v.Kind() {
|
||||
switch kind {
|
||||
case reflect.Int64:
|
||||
v.SetInt(i)
|
||||
return nil
|
||||
|
|
@ -967,20 +975,20 @@ func (d *decoder) unmarshalInteger(value *ast.Node, v reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) unmarshalString(value *ast.Node, v reflect.Value) error {
|
||||
func (d *decoder) unmarshalString(value *unstable.Node, v reflect.Value) error {
|
||||
switch v.Kind() {
|
||||
case reflect.String:
|
||||
v.SetString(string(value.Data))
|
||||
case reflect.Interface:
|
||||
v.Set(reflect.ValueOf(string(value.Data)))
|
||||
default:
|
||||
return newDecodeError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind())
|
||||
return unstable.NewParserError(d.p.Raw(value.Raw), "cannot store TOML string into a Go %s", v.Kind())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleKeyValue(expr *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||
d.strict.EnterKeyValue(expr)
|
||||
|
||||
v, err := d.handleKeyValueInner(expr.Key(), expr.Value(), v)
|
||||
|
|
@ -994,7 +1002,7 @@ func (d *decoder) handleKeyValue(expr *ast.Node, v reflect.Value) (reflect.Value
|
|||
return v, err
|
||||
}
|
||||
|
||||
func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) handleKeyValueInner(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||
if key.Next() {
|
||||
// Still scoping the key
|
||||
return d.handleKeyValuePart(key, value, v)
|
||||
|
|
@ -1004,7 +1012,32 @@ func (d *decoder) handleKeyValueInner(key ast.Iterator, value *ast.Node, v refle
|
|||
return reflect.Value{}, d.handleValue(value, v)
|
||||
}
|
||||
|
||||
func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflect.Value) (reflect.Value, error) {
|
||||
func (d *decoder) keyFromData(keyType reflect.Type, data []byte) (reflect.Value, error) {
|
||||
switch {
|
||||
case stringType.AssignableTo(keyType):
|
||||
return reflect.ValueOf(string(data)), nil
|
||||
|
||||
case stringType.ConvertibleTo(keyType):
|
||||
return reflect.ValueOf(string(data)).Convert(keyType), nil
|
||||
|
||||
case keyType.Implements(textUnmarshalerType):
|
||||
mk := reflect.New(keyType.Elem())
|
||||
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||
}
|
||||
return mk, nil
|
||||
|
||||
case reflect.PtrTo(keyType).Implements(textUnmarshalerType):
|
||||
mk := reflect.New(keyType)
|
||||
if err := mk.Interface().(encoding.TextUnmarshaler).UnmarshalText(data); err != nil {
|
||||
return reflect.Value{}, fmt.Errorf("toml: error unmarshalling key type %s from text: %w", stringType, err)
|
||||
}
|
||||
return mk.Elem(), nil
|
||||
}
|
||||
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", stringType, keyType)
|
||||
}
|
||||
|
||||
func (d *decoder) handleKeyValuePart(key unstable.Iterator, value *unstable.Node, v reflect.Value) (reflect.Value, error) {
|
||||
// contains the replacement for v
|
||||
var rv reflect.Value
|
||||
|
||||
|
|
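keyFromData is the decoding counterpart of the encoder's keyToString: map keys can now be produced through encoding.TextUnmarshaler (on the key type or its pointer) instead of requiring a string-convertible key type. A sketch with a hypothetical Level key type:

	package main

	import (
		"fmt"
		"strconv"

		toml "github.com/pelletier/go-toml/v2"
	)

	type Level int

	func (l *Level) UnmarshalText(b []byte) error {
		n, err := strconv.Atoi(string(b))
		*l = Level(n)
		return err
	}

	func main() {
		var m map[Level]string
		err := toml.Unmarshal([]byte("1 = 'debug'\n2 = 'info'\n"), &m)
		fmt.Println(m, err) // expected: map[1:debug 2:info] <nil>
	}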
@ -1014,16 +1047,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||
case reflect.Map:
|
||||
vt := v.Type()
|
||||
|
||||
mk := reflect.ValueOf(string(key.Node().Data))
|
||||
mkt := stringType
|
||||
|
||||
keyType := vt.Key()
|
||||
if !mkt.AssignableTo(keyType) {
|
||||
if !mkt.ConvertibleTo(keyType) {
|
||||
return reflect.Value{}, fmt.Errorf("toml: cannot convert map key of type %s to expected type %s", mkt, keyType)
|
||||
}
|
||||
|
||||
mk = mk.Convert(keyType)
|
||||
mk, err := d.keyFromData(vt.Key(), key.Node().Data)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
|
||||
// If the map does not exist, create it.
|
||||
|
|
@ -1034,15 +1060,9 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||
|
||||
mv := v.MapIndex(mk)
|
||||
set := false
|
||||
if !mv.IsValid() {
|
||||
if !mv.IsValid() || key.IsLast() {
|
||||
set = true
|
||||
mv = reflect.New(v.Type().Elem()).Elem()
|
||||
} else {
|
||||
if key.IsLast() {
|
||||
var x interface{}
|
||||
mv = reflect.ValueOf(&x).Elem()
|
||||
set = true
|
||||
}
|
||||
}
|
||||
|
||||
nv, err := d.handleKeyValueInner(key, value, mv)
|
||||
|
|
@ -1072,6 +1092,19 @@ func (d *decoder) handleKeyValuePart(key ast.Iterator, value *ast.Node, v reflec
|
|||
d.errorContext.Field = path
|
||||
|
||||
f := fieldByIndex(v, path)
|
||||
|
||||
if !f.CanSet() {
|
||||
// If the field is not settable, need to take a slower path and make a copy of
|
||||
// the struct itself to a new location.
|
||||
nvp := reflect.New(v.Type())
|
||||
nvp.Elem().Set(v)
|
||||
v = nvp.Elem()
|
||||
_, err := d.handleKeyValuePart(key, value, v)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
}
|
||||
return nvp.Elem(), nil
|
||||
}
|
||||
x, err := d.handleKeyValueInner(key, value, f)
|
||||
if err != nil {
|
||||
return reflect.Value{}, err
|
||||
|
|
@ -1137,10 +1170,10 @@ func initAndDereferencePointer(v reflect.Value) reflect.Value {
|
|||
|
||||
// Same as reflect.Value.FieldByIndex, but creates pointers if needed.
|
||||
func fieldByIndex(v reflect.Value, path []int) reflect.Value {
|
||||
for i, x := range path {
|
||||
for _, x := range path {
|
||||
v = v.Field(x)
|
||||
|
||||
if i < len(path)-1 && v.Kind() == reflect.Ptr {
|
||||
if v.Kind() == reflect.Ptr {
|
||||
if v.IsNil() {
|
||||
v.Set(reflect.New(v.Type().Elem()))
|
||||
}
|
||||
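fieldByIndex, used when assigning to struct fields via their index path, allocates nil embedded pointers along the way, so promoted fields behind a nil embedded struct pointer can be populated while decoding. A hedged sketch of that behavior; the types are illustrative:

	package main

	import (
		"fmt"

		toml "github.com/pelletier/go-toml/v2"
	)

	type Inner struct{ B int }

	type Outer struct {
		*Inner // left nil; the decoder allocates it when setting B
	}

	func main() {
		var o Outer
		err := toml.Unmarshal([]byte("B = 1\n"), &o)
		fmt.Println(o.Inner.B, err) // expected: 1 <nil>
	}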
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
package ast
|
||||
package unstable
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
|
@ -7,13 +7,16 @@ import (
|
|||
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||
)
|
||||
|
||||
// Iterator starts uninitialized, you need to call Next() first.
|
||||
// Iterator over a sequence of nodes.
|
||||
//
|
||||
// Starts uninitialized, you need to call Next() first.
|
||||
//
|
||||
// For example:
|
||||
//
|
||||
// it := n.Children()
|
||||
// for it.Next() {
|
||||
// it.Node()
|
||||
// n := it.Node()
|
||||
// // do something with n
|
||||
// }
|
||||
type Iterator struct {
|
||||
started bool
|
||||
|
|
@ -32,42 +35,31 @@ func (c *Iterator) Next() bool {
|
|||
}
|
||||
|
||||
// IsLast returns true if the current node of the iterator is the last
|
||||
// one. Subsequent call to Next() will return false.
|
||||
// one. Subsequent calls to Next() will return false.
|
||||
func (c *Iterator) IsLast() bool {
|
||||
return c.node.next == 0
|
||||
}
|
||||
|
||||
// Node returns a copy of the node pointed at by the iterator.
|
||||
// Node returns a pointer to the node pointed at by the iterator.
|
||||
func (c *Iterator) Node() *Node {
|
||||
return c.node
|
||||
}
|
||||
|
||||
// Root contains a full AST.
|
||||
// Node in a TOML expression AST.
|
||||
//
|
||||
// It is immutable once constructed with Builder.
|
||||
type Root struct {
|
||||
nodes []Node
|
||||
}
|
||||
|
||||
// Iterator over the top level nodes.
|
||||
func (r *Root) Iterator() Iterator {
|
||||
it := Iterator{}
|
||||
if len(r.nodes) > 0 {
|
||||
it.node = &r.nodes[0]
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (r *Root) at(idx Reference) *Node {
|
||||
return &r.nodes[idx]
|
||||
}
|
||||
|
||||
// Arrays have one child per element in the array. InlineTables have
|
||||
// one child per key-value pair in the table. KeyValues have at least
|
||||
// two children. The first one is the value. The rest make a
|
||||
// potentially dotted key. Table and Array table have one child per
|
||||
// element of the key they represent (same as KeyValue, but without
|
||||
// the last node being the value).
|
||||
// Depending on Kind, its sequence of children should be interpreted
|
||||
// differently.
|
||||
//
|
||||
// - Array have one child per element in the array.
|
||||
// - InlineTable have one child per key-value in the table (each of kind
|
||||
// InlineTable).
|
||||
// - KeyValue have at least two children. The first one is the value. The rest
|
||||
// make a potentially dotted key.
|
||||
// - Table and ArrayTable's children represent a dotted key (same as
|
||||
// KeyValue, but without the first node being the value).
|
||||
//
|
||||
// When relevant, Raw describes the range of bytes this node is referring to in
|
||||
// the input document. Use Parser.Raw() to retrieve the actual bytes.
|
||||
type Node struct {
|
||||
Kind Kind
|
||||
Raw Range // Raw bytes from the input.
|
||||
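Given that layout, a key-value expression can be taken apart with Key() and Value(). A fragment, assuming p is an unstable.Parser and e = p.Expression() was just obtained inside a NextExpression() loop:

	if e.Kind == unstable.KeyValue {
		for it := e.Key(); it.Next(); { // the (possibly dotted) key parts
			fmt.Printf("key part: %s\n", it.Node().Data)
		}
		v := e.Value() // first child: the value node
		fmt.Printf("value kind %s, raw bytes %q\n", v.Kind, p.Raw(v.Raw))
	}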
|
|
@ -80,13 +72,13 @@ type Node struct {
|
|||
child int // 0 if no child
|
||||
}
|
||||
|
||||
// Range of bytes in the document.
|
||||
type Range struct {
|
||||
Offset uint32
|
||||
Length uint32
|
||||
}
|
||||
|
||||
// Next returns a copy of the next node, or an invalid Node if there
|
||||
// is no next node.
|
||||
// Next returns a pointer to the next node, or nil if there is no next node.
|
||||
func (n *Node) Next() *Node {
|
||||
if n.next == 0 {
|
||||
return nil
|
||||
|
|
@ -96,9 +88,9 @@ func (n *Node) Next() *Node {
|
|||
return (*Node)(danger.Stride(ptr, size, n.next))
|
||||
}
|
||||
|
||||
// Child returns a copy of the first child node of this node. Other
|
||||
// children can be accessed calling Next on the first child. Returns
|
||||
// an invalid Node if there is none.
|
||||
// Child returns a pointer to the first child node of this node. Other children
// can be accessed by calling Next on the first child. Returns nil if this Node
// has no child.
|
||||
func (n *Node) Child() *Node {
|
||||
if n.child == 0 {
|
||||
return nil
|
||||
|
|
@ -113,9 +105,9 @@ func (n *Node) Valid() bool {
|
|||
return n != nil
|
||||
}
|
||||
|
||||
// Key returns the child nodes making the Key on a supported
|
||||
// node. Panics otherwise. They are guaranteed to be all be of the
|
||||
// Kind Key. A simple key would return just one element.
|
||||
// Key returns the child nodes making the Key on a supported node. Panics
// otherwise. They are all guaranteed to be of the Kind Key. A simple key
// would return just one element.
|
||||
func (n *Node) Key() Iterator {
|
||||
switch n.Kind {
|
||||
case KeyValue:
|
||||
|
|
@ -0,0 +1,71 @@
|
|||
package unstable
|
||||
|
||||
// root contains a full AST.
|
||||
//
|
||||
// It is immutable once constructed with Builder.
|
||||
type root struct {
|
||||
nodes []Node
|
||||
}
|
||||
|
||||
// Iterator over the top level nodes.
|
||||
func (r *root) Iterator() Iterator {
|
||||
it := Iterator{}
|
||||
if len(r.nodes) > 0 {
|
||||
it.node = &r.nodes[0]
|
||||
}
|
||||
return it
|
||||
}
|
||||
|
||||
func (r *root) at(idx reference) *Node {
|
||||
return &r.nodes[idx]
|
||||
}
|
||||
|
||||
type reference int
|
||||
|
||||
const invalidReference reference = -1
|
||||
|
||||
func (r reference) Valid() bool {
|
||||
return r != invalidReference
|
||||
}
|
||||
|
||||
type builder struct {
|
||||
tree root
|
||||
lastIdx int
|
||||
}
|
||||
|
||||
func (b *builder) Tree() *root {
|
||||
return &b.tree
|
||||
}
|
||||
|
||||
func (b *builder) NodeAt(ref reference) *Node {
|
||||
return b.tree.at(ref)
|
||||
}
|
||||
|
||||
func (b *builder) Reset() {
|
||||
b.tree.nodes = b.tree.nodes[:0]
|
||||
b.lastIdx = 0
|
||||
}
|
||||
|
||||
func (b *builder) Push(n Node) reference {
|
||||
b.lastIdx = len(b.tree.nodes)
|
||||
b.tree.nodes = append(b.tree.nodes, n)
|
||||
return reference(b.lastIdx)
|
||||
}
|
||||
|
||||
func (b *builder) PushAndChain(n Node) reference {
|
||||
newIdx := len(b.tree.nodes)
|
||||
b.tree.nodes = append(b.tree.nodes, n)
|
||||
if b.lastIdx >= 0 {
|
||||
b.tree.nodes[b.lastIdx].next = newIdx - b.lastIdx
|
||||
}
|
||||
b.lastIdx = newIdx
|
||||
return reference(b.lastIdx)
|
||||
}
|
||||
|
||||
func (b *builder) AttachChild(parent reference, child reference) {
|
||||
b.tree.nodes[parent].child = int(child) - int(parent)
|
||||
}
|
||||
|
||||
func (b *builder) Chain(from reference, to reference) {
|
||||
b.tree.nodes[from].next = int(to) - int(from)
|
||||
}
|
||||
|
|
@ -0,0 +1,3 @@
|
|||
// Package unstable provides APIs that do not meet the backward compatibility
|
||||
// guarantees yet.
|
||||
package unstable
|
||||
|
|
@ -1,25 +1,26 @@
|
|||
package ast
|
||||
package unstable
|
||||
|
||||
import "fmt"
|
||||
|
||||
// Kind represents the type of TOML structure contained in a given Node.
|
||||
type Kind int
|
||||
|
||||
const (
|
||||
// meta
|
||||
// Meta
|
||||
Invalid Kind = iota
|
||||
Comment
|
||||
Key
|
||||
|
||||
// top level structures
|
||||
// Top level structures
|
||||
Table
|
||||
ArrayTable
|
||||
KeyValue
|
||||
|
||||
// containers values
|
||||
// Containers values
|
||||
Array
|
||||
InlineTable
|
||||
|
||||
// values
|
||||
// Values
|
||||
String
|
||||
Bool
|
||||
Float
|
||||
|
|
@ -30,6 +31,7 @@ const (
|
|||
DateTime
|
||||
)
|
||||
|
||||
// String implementation of fmt.Stringer.
|
||||
func (k Kind) String() string {
|
||||
switch k {
|
||||
case Invalid:
|
||||
|
|
@ -1,50 +1,108 @@
|
|||
package toml
|
||||
package unstable
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"unicode"
|
||||
|
||||
"github.com/pelletier/go-toml/v2/internal/ast"
|
||||
"github.com/pelletier/go-toml/v2/internal/characters"
|
||||
"github.com/pelletier/go-toml/v2/internal/danger"
|
||||
)
|
||||
|
||||
type parser struct {
|
||||
builder ast.Builder
|
||||
ref ast.Reference
|
||||
// ParserError describes an error relative to the content of the document.
//
// It cannot outlive the instance of Parser it refers to, and may cause panics
// if the parser is reset.
type ParserError struct {
	Highlight []byte
	Message   string
	Key       []string // optional
}

// Error is the implementation of the error interface.
func (e *ParserError) Error() string {
	return e.Message
}

// NewParserError is a convenience function to create a ParserError.
//
// Warning: Highlight needs to be a subslice of Parser.data, so only slices
// returned by Parser.Raw are valid candidates.
func NewParserError(highlight []byte, format string, args ...interface{}) error {
	return &ParserError{
		Highlight: highlight,
		Message:   fmt.Errorf(format, args...).Error(),
	}
}

// Parser scans over a TOML-encoded document and generates an iterative AST.
//
// To prime the Parser, first reset it with the contents of a TOML document.
// Then, process all top-level expressions sequentially. See Example.
//
// Don't forget to check Error() after you're done parsing.
//
// Each top-level expression needs to be fully processed before calling
// NextExpression() again. Otherwise, calls to various Node methods may panic if
// the parser has moved on to the next expression.
//
// For performance reasons, go-toml doesn't make a copy of the input bytes given
// to the parser. Make sure to copy all the bytes you need to outlive the slice
// given to the parser.
type Parser struct {
	data    []byte
	builder builder
	ref     reference
	left    []byte
	err     error
	first   bool

	KeepComments bool
}

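The Unmarshal and Decode paths later in this diff drive the parser exactly this way: Reset, a NextExpression loop over Expression, then a final Error check. A minimal standalone sketch:

	package main

	import (
		"fmt"

		"github.com/pelletier/go-toml/v2/unstable"
	)

	func main() {
		p := unstable.Parser{}
		p.Reset([]byte("a = 1\n[table]\nb = 'two'\n"))
		for p.NextExpression() {
			fmt.Println(p.Expression().Kind) // KeyValue, Table, KeyValue
		}
		if err := p.Error(); err != nil {
			fmt.Println("parse error:", err)
		}
	}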
func (p *parser) Range(b []byte) ast.Range {
|
||||
return ast.Range{
|
||||
// Data returns the slice provided to the last call to Reset.
|
||||
func (p *Parser) Data() []byte {
|
||||
return p.data
|
||||
}
|
||||
|
||||
// Range returns a range description that corresponds to a given slice of the
|
||||
// input. If the argument is not a subslice of the parser input, this function
|
||||
// panics.
|
||||
func (p *Parser) Range(b []byte) Range {
|
||||
return Range{
|
||||
Offset: uint32(danger.SubsliceOffset(p.data, b)),
|
||||
Length: uint32(len(b)),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *parser) Raw(raw ast.Range) []byte {
|
||||
// Raw returns the slice corresponding to the bytes in the given range.
|
||||
func (p *Parser) Raw(raw Range) []byte {
|
||||
return p.data[raw.Offset : raw.Offset+raw.Length]
|
||||
}
|
||||
|
||||
func (p *parser) Reset(b []byte) {
|
||||
// Reset brings the parser to its initial state for a given input. It wipes and
// reuses internal storage to reduce allocation.
|
||||
func (p *Parser) Reset(b []byte) {
|
||||
p.builder.Reset()
|
||||
p.ref = ast.InvalidReference
|
||||
p.ref = invalidReference
|
||||
p.data = b
|
||||
p.left = b
|
||||
p.err = nil
|
||||
p.first = true
|
||||
}
|
||||
|
||||
//nolint:cyclop
|
||||
func (p *parser) NextExpression() bool {
|
||||
// NextExpression parses the next top-level expression. If an expression was
|
||||
// successfully parsed, it returns true. If the parser is at the end of the
|
||||
// document or an error occurred, it returns false.
|
||||
//
|
||||
// Retrieve the parsed expression with Expression().
|
||||
func (p *Parser) NextExpression() bool {
|
||||
if len(p.left) == 0 || p.err != nil {
|
||||
return false
|
||||
}
|
||||
|
||||
p.builder.Reset()
|
||||
p.ref = ast.InvalidReference
|
||||
p.ref = invalidReference
|
||||
|
||||
for {
|
||||
if len(p.left) == 0 || p.err != nil {
|
||||
|
|
@ -73,15 +131,56 @@ func (p *parser) NextExpression() bool {
|
|||
}
|
||||
}
|
||||
|
||||
func (p *parser) Expression() *ast.Node {
|
||||
// Expression returns a pointer to the node representing the last successfully
|
||||
// parsed expression.
|
||||
func (p *Parser) Expression() *Node {
|
||||
return p.builder.NodeAt(p.ref)
|
||||
}
|
||||
|
||||
func (p *parser) Error() error {
|
||||
// Error returns any error that has occurred during parsing.
|
||||
func (p *Parser) Error() error {
|
||||
return p.err
|
||||
}
|
||||
|
||||
func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
||||
// Position describes a position in the input.
|
||||
type Position struct {
|
||||
// Number of bytes from the beginning of the input.
|
||||
Offset int
|
||||
// Line number, starting at 1.
|
||||
Line int
|
||||
// Column number, starting at 1.
|
||||
Column int
|
||||
}
|
||||
|
||||
// Shape describes the position of a range in the input.
|
||||
type Shape struct {
|
||||
Start Position
|
||||
End Position
|
||||
}
|
||||
|
||||
func (p *Parser) position(b []byte) Position {
|
||||
offset := danger.SubsliceOffset(p.data, b)
|
||||
|
||||
lead := p.data[:offset]
|
||||
|
||||
return Position{
|
||||
Offset: offset,
|
||||
Line: bytes.Count(lead, []byte{'\n'}) + 1,
|
||||
Column: len(lead) - bytes.LastIndex(lead, []byte{'\n'}),
|
||||
}
|
||||
}
|
||||
|
||||
// Shape returns the shape of the given range in the input. Will
|
||||
// panic if the range is not a subslice of the input.
|
||||
func (p *Parser) Shape(r Range) Shape {
|
||||
raw := p.Raw(r)
|
||||
return Shape{
|
||||
Start: p.position(raw),
|
||||
End: p.position(raw[r.Length:]),
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Parser) parseNewline(b []byte) ([]byte, error) {
|
||||
if b[0] == '\n' {
|
||||
return b[1:], nil
|
||||
}
|
||||
|
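Shape converts a Raw range back into line/column positions. A fragment that could sit inside the NextExpression() loop sketched earlier; it assumes the current expression is a key-value whose value node carries a Raw range:

	e := p.Expression()
	if e.Kind == unstable.KeyValue {
		v := e.Value()
		s := p.Shape(v.Raw)
		fmt.Printf("value %q starts at line %d, column %d\n",
			p.Raw(v.Raw), s.Start.Line, s.Start.Column)
	}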
|
@ -91,14 +190,27 @@ func (p *parser) parseNewline(b []byte) ([]byte, error) {
|
|||
return rest, err
|
||||
}
|
||||
|
||||
return nil, newDecodeError(b[0:1], "expected newline but got %#U", b[0])
|
||||
return nil, NewParserError(b[0:1], "expected newline but got %#U", b[0])
|
||||
}
|
||||
|
||||
func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseComment(b []byte) (reference, []byte, error) {
|
||||
ref := invalidReference
|
||||
data, rest, err := scanComment(b)
|
||||
if p.KeepComments && err == nil {
|
||||
ref = p.builder.Push(Node{
|
||||
Kind: Comment,
|
||||
Raw: p.Range(data),
|
||||
Data: data,
|
||||
})
|
||||
}
|
||||
return ref, rest, err
|
||||
}
|
||||
|
||||
func (p *Parser) parseExpression(b []byte) (reference, []byte, error) {
|
||||
// expression = ws [ comment ]
|
||||
// expression =/ ws keyval ws [ comment ]
|
||||
// expression =/ ws table ws [ comment ]
|
||||
ref := ast.InvalidReference
|
||||
ref := invalidReference
|
||||
|
||||
b = p.parseWhitespace(b)
|
||||
|
||||
|
|
@ -107,7 +219,7 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
|
||||
if b[0] == '#' {
|
||||
_, rest, err := scanComment(b)
|
||||
ref, rest, err := p.parseComment(b)
|
||||
return ref, rest, err
|
||||
}
|
||||
|
||||
|
|
@ -129,14 +241,17 @@ func (p *parser) parseExpression(b []byte) (ast.Reference, []byte, error) {
|
|||
b = p.parseWhitespace(b)
|
||||
|
||||
if len(b) > 0 && b[0] == '#' {
|
||||
_, rest, err := scanComment(b)
|
||||
cref, rest, err := p.parseComment(b)
|
||||
if cref != invalidReference {
|
||||
p.builder.Chain(ref, cref)
|
||||
}
|
||||
return ref, rest, err
|
||||
}
|
||||
|
||||
return ref, b, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseTable(b []byte) (reference, []byte, error) {
|
||||
// table = std-table / array-table
|
||||
if len(b) > 1 && b[1] == '[' {
|
||||
return p.parseArrayTable(b)
|
||||
|
|
@ -145,12 +260,12 @@ func (p *parser) parseTable(b []byte) (ast.Reference, []byte, error) {
|
|||
return p.parseStdTable(b)
|
||||
}
|
||||
|
||||
func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseArrayTable(b []byte) (reference, []byte, error) {
|
||||
// array-table = array-table-open key array-table-close
|
||||
// array-table-open = %x5B.5B ws ; [[ Double left square bracket
|
||||
// array-table-close = ws %x5D.5D ; ]] Double right square bracket
|
||||
ref := p.builder.Push(ast.Node{
|
||||
Kind: ast.ArrayTable,
|
||||
ref := p.builder.Push(Node{
|
||||
Kind: ArrayTable,
|
||||
})
|
||||
|
||||
b = b[2:]
|
||||
|
|
@ -174,12 +289,12 @@ func (p *parser) parseArrayTable(b []byte) (ast.Reference, []byte, error) {
|
|||
return ref, b, err
|
||||
}
|
||||
|
||||
func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseStdTable(b []byte) (reference, []byte, error) {
|
||||
// std-table = std-table-open key std-table-close
|
||||
// std-table-open = %x5B ws ; [ Left square bracket
|
||||
// std-table-close = ws %x5D ; ] Right square bracket
|
||||
ref := p.builder.Push(ast.Node{
|
||||
Kind: ast.Table,
|
||||
ref := p.builder.Push(Node{
|
||||
Kind: Table,
|
||||
})
|
||||
|
||||
b = b[1:]
|
||||
|
|
@ -199,15 +314,15 @@ func (p *parser) parseStdTable(b []byte) (ast.Reference, []byte, error) {
|
|||
return ref, b, err
|
||||
}
|
||||
|
||||
func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseKeyval(b []byte) (reference, []byte, error) {
|
||||
// keyval = key keyval-sep val
|
||||
ref := p.builder.Push(ast.Node{
|
||||
Kind: ast.KeyValue,
|
||||
ref := p.builder.Push(Node{
|
||||
Kind: KeyValue,
|
||||
})
|
||||
|
||||
key, b, err := p.parseKey(b)
|
||||
if err != nil {
|
||||
return ast.InvalidReference, nil, err
|
||||
return invalidReference, nil, err
|
||||
}
|
||||
|
||||
// keyval-sep = ws %x3D ws ; =
|
||||
|
|
@ -215,12 +330,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||
b = p.parseWhitespace(b)
|
||||
|
||||
if len(b) == 0 {
|
||||
return ast.InvalidReference, nil, newDecodeError(b, "expected = after a key, but the document ends there")
|
||||
return invalidReference, nil, NewParserError(b, "expected = after a key, but the document ends there")
|
||||
}
|
||||
|
||||
b, err = expect('=', b)
|
||||
if err != nil {
|
||||
return ast.InvalidReference, nil, err
|
||||
return invalidReference, nil, err
|
||||
}
|
||||
|
||||
b = p.parseWhitespace(b)
|
||||
|
|
@ -237,12 +352,12 @@ func (p *parser) parseKeyval(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
|
||||
//nolint:cyclop,funlen
|
||||
func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseVal(b []byte) (reference, []byte, error) {
|
||||
// val = string / boolean / array / inline-table / date-time / float / integer
|
||||
ref := ast.InvalidReference
|
||||
ref := invalidReference
|
||||
|
||||
if len(b) == 0 {
|
||||
return ref, nil, newDecodeError(b, "expected value, not eof")
|
||||
return ref, nil, NewParserError(b, "expected value, not eof")
|
||||
}
|
||||
|
||||
var err error
|
||||
|
|
@ -259,8 +374,8 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
|
||||
if err == nil {
|
||||
ref = p.builder.Push(ast.Node{
|
||||
Kind: ast.String,
|
||||
ref = p.builder.Push(Node{
|
||||
Kind: String,
|
||||
Raw: p.Range(raw),
|
||||
Data: v,
|
||||
})
|
||||
|
|
@ -277,8 +392,8 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
|
||||
if err == nil {
|
||||
ref = p.builder.Push(ast.Node{
|
||||
Kind: ast.String,
|
||||
ref = p.builder.Push(Node{
|
||||
Kind: String,
|
||||
Raw: p.Range(raw),
|
||||
Data: v,
|
||||
})
|
||||
|
|
@ -287,22 +402,22 @@ func (p *parser) parseVal(b []byte) (ast.Reference, []byte, error) {
|
|||
return ref, b, err
|
||||
case 't':
|
||||
if !scanFollowsTrue(b) {
|
||||
return ref, nil, newDecodeError(atmost(b, 4), "expected 'true'")
|
||||
return ref, nil, NewParserError(atmost(b, 4), "expected 'true'")
|
||||
}
|
||||
|
||||
ref = p.builder.Push(ast.Node{
|
||||
Kind: ast.Bool,
|
||||
ref = p.builder.Push(Node{
|
||||
Kind: Bool,
|
||||
Data: b[:4],
|
||||
})
|
||||
|
||||
return ref, b[4:], nil
|
||||
case 'f':
|
||||
if !scanFollowsFalse(b) {
|
||||
return ref, nil, newDecodeError(atmost(b, 5), "expected 'false'")
|
||||
return ref, nil, NewParserError(atmost(b, 5), "expected 'false'")
|
||||
}
|
||||
|
||||
ref = p.builder.Push(ast.Node{
|
||||
Kind: ast.Bool,
|
||||
ref = p.builder.Push(Node{
|
||||
Kind: Bool,
|
||||
Data: b[:5],
|
||||
})
|
||||
|
||||
|
|
@ -324,7 +439,7 @@ func atmost(b []byte, n int) []byte {
|
|||
return b[:n]
|
||||
}
|
||||
|
||||
func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
func (p *Parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
v, rest, err := scanLiteralString(b)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
|
|
@ -333,19 +448,20 @@ func (p *parser) parseLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
|||
return v, v[1 : len(v)-1], rest, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseInlineTable(b []byte) (reference, []byte, error) {
|
||||
// inline-table = inline-table-open [ inline-table-keyvals ] inline-table-close
|
||||
// inline-table-open = %x7B ws ; {
|
||||
// inline-table-close = ws %x7D ; }
|
||||
// inline-table-sep = ws %x2C ws ; , Comma
|
||||
// inline-table-keyvals = keyval [ inline-table-sep inline-table-keyvals ]
|
||||
parent := p.builder.Push(ast.Node{
|
||||
Kind: ast.InlineTable,
|
||||
parent := p.builder.Push(Node{
|
||||
Kind: InlineTable,
|
||||
Raw: p.Range(b[:1]),
|
||||
})
|
||||
|
||||
first := true
|
||||
|
||||
var child ast.Reference
|
||||
var child reference
|
||||
|
||||
b = b[1:]
|
||||
|
||||
|
|
@ -356,7 +472,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||
b = p.parseWhitespace(b)
|
||||
|
||||
if len(b) == 0 {
|
||||
return parent, nil, newDecodeError(previousB[:1], "inline table is incomplete")
|
||||
return parent, nil, NewParserError(previousB[:1], "inline table is incomplete")
|
||||
}
|
||||
|
||||
if b[0] == '}' {
|
||||
|
|
@ -371,7 +487,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||
b = p.parseWhitespace(b)
|
||||
}
|
||||
|
||||
var kv ast.Reference
|
||||
var kv reference
|
||||
|
||||
kv, b, err = p.parseKeyval(b)
|
||||
if err != nil {
|
||||
|
|
@ -394,7 +510,7 @@ func (p *parser) parseInlineTable(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
|
||||
//nolint:funlen,cyclop
|
||||
func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseValArray(b []byte) (reference, []byte, error) {
|
||||
// array = array-open [ array-values ] ws-comment-newline array-close
|
||||
// array-open = %x5B ; [
|
||||
// array-close = %x5D ; ]
|
||||
|
|
@ -405,23 +521,39 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||
arrayStart := b
|
||||
b = b[1:]
|
||||
|
||||
parent := p.builder.Push(ast.Node{
|
||||
Kind: ast.Array,
|
||||
parent := p.builder.Push(Node{
|
||||
Kind: Array,
|
||||
})
|
||||
|
||||
// First indicates whether the parser is looking for the first element
|
||||
// (non-comment) of the array.
|
||||
first := true
|
||||
|
||||
var lastChild ast.Reference
|
||||
lastChild := invalidReference
|
||||
|
||||
addChild := func(valueRef reference) {
|
||||
if lastChild == invalidReference {
|
||||
p.builder.AttachChild(parent, valueRef)
|
||||
} else {
|
||||
p.builder.Chain(lastChild, valueRef)
|
||||
}
|
||||
lastChild = valueRef
|
||||
}
|
||||
|
||||
var err error
|
||||
for len(b) > 0 {
|
||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
cref := invalidReference
|
||||
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
if err != nil {
|
||||
return parent, nil, err
|
||||
}
|
||||
|
||||
if cref != invalidReference {
|
||||
addChild(cref)
|
||||
}
|
||||
|
||||
if len(b) == 0 {
|
||||
return parent, nil, newDecodeError(arrayStart[:1], "array is incomplete")
|
||||
return parent, nil, NewParserError(arrayStart[:1], "array is incomplete")
|
||||
}
|
||||
|
||||
if b[0] == ']' {
|
||||
|
|
@ -430,16 +562,19 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||
|
||||
if b[0] == ',' {
|
||||
if first {
|
||||
return parent, nil, newDecodeError(b[0:1], "array cannot start with comma")
|
||||
return parent, nil, NewParserError(b[0:1], "array cannot start with comma")
|
||||
}
|
||||
b = b[1:]
|
||||
|
||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
if err != nil {
|
||||
return parent, nil, err
|
||||
}
|
||||
if cref != invalidReference {
|
||||
addChild(cref)
|
||||
}
|
||||
} else if !first {
|
||||
return parent, nil, newDecodeError(b[0:1], "array elements must be separated by commas")
|
||||
return parent, nil, NewParserError(b[0:1], "array elements must be separated by commas")
|
||||
}
|
||||
|
||||
// TOML allows trailing commas in arrays.
|
||||
|
|
@ -447,23 +582,22 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||
break
|
||||
}
|
||||
|
||||
var valueRef ast.Reference
|
||||
var valueRef reference
|
||||
valueRef, b, err = p.parseVal(b)
|
||||
if err != nil {
|
||||
return parent, nil, err
|
||||
}
|
||||
|
||||
if first {
|
||||
p.builder.AttachChild(parent, valueRef)
|
||||
} else {
|
||||
p.builder.Chain(lastChild, valueRef)
|
||||
}
|
||||
lastChild = valueRef
|
||||
addChild(valueRef)
|
||||
|
||||
b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
cref, b, err = p.parseOptionalWhitespaceCommentNewline(b)
|
||||
if err != nil {
|
||||
return parent, nil, err
|
||||
}
|
||||
if cref != invalidReference {
|
||||
addChild(cref)
|
||||
}
|
||||
|
||||
first = false
|
||||
}
|
||||
|
||||
|
|
@ -472,15 +606,34 @@ func (p *parser) parseValArray(b []byte) (ast.Reference, []byte, error) {
|
|||
return parent, rest, err
|
||||
}
|
||||
|
||||
func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error) {
|
||||
func (p *Parser) parseOptionalWhitespaceCommentNewline(b []byte) (reference, []byte, error) {
|
||||
rootCommentRef := invalidReference
|
||||
latestCommentRef := invalidReference
|
||||
|
||||
addComment := func(ref reference) {
|
||||
if rootCommentRef == invalidReference {
|
||||
rootCommentRef = ref
|
||||
} else if latestCommentRef == invalidReference {
|
||||
p.builder.AttachChild(rootCommentRef, ref)
|
||||
latestCommentRef = ref
|
||||
} else {
|
||||
p.builder.Chain(latestCommentRef, ref)
|
||||
latestCommentRef = ref
|
||||
}
|
||||
}
|
||||
|
||||
for len(b) > 0 {
|
||||
var err error
|
||||
b = p.parseWhitespace(b)
|
||||
|
||||
if len(b) > 0 && b[0] == '#' {
|
||||
_, b, err = scanComment(b)
|
||||
var ref reference
|
||||
ref, b, err = p.parseComment(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return invalidReference, nil, err
|
||||
}
|
||||
if ref != invalidReference {
|
||||
addComment(ref)
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -491,17 +644,17 @@ func (p *parser) parseOptionalWhitespaceCommentNewline(b []byte) ([]byte, error)
|
|||
if b[0] == '\n' || b[0] == '\r' {
|
||||
b, err = p.parseNewline(b)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return invalidReference, nil, err
|
||||
}
|
||||
} else {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
return b, nil
|
||||
return rootCommentRef, b, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
func (p *Parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
token, rest, err := scanMultilineLiteralString(b)
|
||||
if err != nil {
|
||||
return nil, nil, nil, err
|
||||
|
|
@ -520,7 +673,7 @@ func (p *parser) parseMultilineLiteralString(b []byte) ([]byte, []byte, []byte,
|
|||
}
|
||||
|
||||
//nolint:funlen,gocognit,cyclop
|
||||
func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
func (p *Parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
// ml-basic-string = ml-basic-string-delim [ newline ] ml-basic-body
|
||||
// ml-basic-string-delim
|
||||
// ml-basic-string-delim = 3quotation-mark
|
||||
|
|
@ -551,11 +704,11 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||
|
||||
if !escaped {
|
||||
str := token[startIdx:endIdx]
|
||||
verr := utf8TomlValidAlreadyEscaped(str)
|
||||
verr := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||
if verr.Zero() {
|
||||
return token, str, rest, nil
|
||||
}
|
||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||
return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||
}
|
||||
|
||||
var builder bytes.Buffer
|
||||
|
|
@ -635,13 +788,13 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||
builder.WriteRune(x)
|
||||
i += 8
|
||||
default:
|
||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
||||
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||
}
|
||||
i++
|
||||
} else {
|
||||
size := utf8ValidNext(token[i:])
|
||||
size := characters.Utf8ValidNext(token[i:])
|
||||
if size == 0 {
|
||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
||||
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||
}
|
||||
builder.Write(token[i : i+size])
|
||||
i += size
|
||||
|
|
@ -651,7 +804,7 @@ func (p *parser) parseMultilineBasicString(b []byte) ([]byte, []byte, []byte, er
|
|||
return token, builder.Bytes(), rest, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseKey(b []byte) (reference, []byte, error) {
|
||||
// key = simple-key / dotted-key
|
||||
// simple-key = quoted-key / unquoted-key
|
||||
//
|
||||
|
|
@ -662,11 +815,11 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||
// dot-sep = ws %x2E ws ; . Period
|
||||
raw, key, b, err := p.parseSimpleKey(b)
|
||||
if err != nil {
|
||||
return ast.InvalidReference, nil, err
|
||||
return invalidReference, nil, err
|
||||
}
|
||||
|
||||
ref := p.builder.Push(ast.Node{
|
||||
Kind: ast.Key,
|
||||
ref := p.builder.Push(Node{
|
||||
Kind: Key,
|
||||
Raw: p.Range(raw),
|
||||
Data: key,
|
||||
})
|
||||
|
|
@ -681,8 +834,8 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||
return ref, nil, err
|
||||
}
|
||||
|
||||
p.builder.PushAndChain(ast.Node{
|
||||
Kind: ast.Key,
|
||||
p.builder.PushAndChain(Node{
|
||||
Kind: Key,
|
||||
Raw: p.Range(raw),
|
||||
Data: key,
|
||||
})
|
||||
|
|
@ -694,9 +847,9 @@ func (p *parser) parseKey(b []byte) (ast.Reference, []byte, error) {
|
|||
return ref, b, nil
|
||||
}
|
||||
|
||||
func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
||||
func (p *Parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
||||
if len(b) == 0 {
|
||||
return nil, nil, nil, newDecodeError(b, "expected key but found none")
|
||||
return nil, nil, nil, NewParserError(b, "expected key but found none")
|
||||
}
|
||||
|
||||
// simple-key = quoted-key / unquoted-key
|
||||
|
|
@ -711,12 +864,12 @@ func (p *parser) parseSimpleKey(b []byte) (raw, key, rest []byte, err error) {
|
|||
key, rest = scanUnquotedKey(b)
|
||||
return key, key, rest, nil
|
||||
default:
|
||||
return nil, nil, nil, newDecodeError(b[0:1], "invalid character at start of key: %c", b[0])
|
||||
return nil, nil, nil, NewParserError(b[0:1], "invalid character at start of key: %c", b[0])
|
||||
}
|
||||
}
|
||||
|
||||
//nolint:funlen,cyclop
|
||||
func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
func (p *Parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
||||
// basic-string = quotation-mark *basic-char quotation-mark
|
||||
// quotation-mark = %x22 ; "
|
||||
// basic-char = basic-unescaped / escaped
|
||||
|
|
@ -744,11 +897,11 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||
// validate the string and return a direct reference to the buffer.
|
||||
if !escaped {
|
||||
str := token[startIdx:endIdx]
|
||||
verr := utf8TomlValidAlreadyEscaped(str)
|
||||
verr := characters.Utf8TomlValidAlreadyEscaped(str)
|
||||
if verr.Zero() {
|
||||
return token, str, rest, nil
|
||||
}
|
||||
return nil, nil, nil, newDecodeError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||
return nil, nil, nil, NewParserError(str[verr.Index:verr.Index+verr.Size], "invalid UTF-8")
|
||||
}
|
||||
|
||||
i := startIdx
|
||||
|
|
@ -795,13 +948,13 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||
builder.WriteRune(x)
|
||||
i += 8
|
||||
default:
|
||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid escaped character %#U", c)
|
||||
return nil, nil, nil, NewParserError(token[i:i+1], "invalid escaped character %#U", c)
|
||||
}
|
||||
i++
|
||||
} else {
|
||||
size := utf8ValidNext(token[i:])
|
||||
size := characters.Utf8ValidNext(token[i:])
|
||||
if size == 0 {
|
||||
return nil, nil, nil, newDecodeError(token[i:i+1], "invalid character %#U", c)
|
||||
return nil, nil, nil, NewParserError(token[i:i+1], "invalid character %#U", c)
|
||||
}
|
||||
builder.Write(token[i : i+size])
|
||||
i += size
|
||||
|
|
@ -813,7 +966,7 @@ func (p *parser) parseBasicString(b []byte) ([]byte, []byte, []byte, error) {
|
|||
|
||||
func hexToRune(b []byte, length int) (rune, error) {
|
||||
if len(b) < length {
|
||||
return -1, newDecodeError(b, "unicode point needs %d character, not %d", length, len(b))
|
||||
return -1, NewParserError(b, "unicode point needs %d character, not %d", length, len(b))
|
||||
}
|
||||
b = b[:length]
|
||||
|
||||
|
|
@ -828,19 +981,19 @@ func hexToRune(b []byte, length int) (rune, error) {
|
|||
case 'A' <= c && c <= 'F':
|
||||
d = uint32(c - 'A' + 10)
|
||||
default:
|
||||
return -1, newDecodeError(b[i:i+1], "non-hex character")
|
||||
return -1, NewParserError(b[i:i+1], "non-hex character")
|
||||
}
|
||||
r = r*16 + d
|
||||
}
|
||||
|
||||
if r > unicode.MaxRune || 0xD800 <= r && r < 0xE000 {
|
||||
return -1, newDecodeError(b, "escape sequence is invalid Unicode code point")
|
||||
return -1, NewParserError(b, "escape sequence is invalid Unicode code point")
|
||||
}
|
||||
|
||||
return rune(r), nil
|
||||
}
|
||||
|
||||
func (p *parser) parseWhitespace(b []byte) []byte {
|
||||
func (p *Parser) parseWhitespace(b []byte) []byte {
|
||||
// ws = *wschar
|
||||
// wschar = %x20 ; Space
|
||||
// wschar =/ %x09 ; Horizontal tab
|
||||
|
|
@ -850,24 +1003,24 @@ func (p *parser) parseWhitespace(b []byte) []byte {
|
|||
}
|
||||
|
||||
//nolint:cyclop
|
||||
func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) parseIntOrFloatOrDateTime(b []byte) (reference, []byte, error) {
|
||||
switch b[0] {
|
||||
case 'i':
|
||||
if !scanFollowsInf(b) {
|
||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'inf'")
|
||||
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'inf'")
|
||||
}
|
||||
|
||||
return p.builder.Push(ast.Node{
|
||||
Kind: ast.Float,
|
||||
return p.builder.Push(Node{
|
||||
Kind: Float,
|
||||
Data: b[:3],
|
||||
}), b[3:], nil
|
||||
case 'n':
|
||||
if !scanFollowsNan(b) {
|
||||
return ast.InvalidReference, nil, newDecodeError(atmost(b, 3), "expected 'nan'")
|
||||
return invalidReference, nil, NewParserError(atmost(b, 3), "expected 'nan'")
|
||||
}
|
||||
|
||||
return p.builder.Push(ast.Node{
|
||||
Kind: ast.Float,
|
||||
return p.builder.Push(Node{
|
||||
Kind: Float,
|
||||
Data: b[:3],
|
||||
}), b[3:], nil
|
||||
case '+', '-':
|
||||
|
|
@ -898,7 +1051,7 @@ func (p *parser) parseIntOrFloatOrDateTime(b []byte) (ast.Reference, []byte, err
|
|||
return p.scanIntOrFloat(b)
|
||||
}
|
||||
|
||||
func (p *parser) scanDateTime(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) scanDateTime(b []byte) (reference, []byte, error) {
|
||||
// scans for contiguous characters in [0-9T:Z.+-], and up to one space if
|
||||
// followed by a digit.
|
||||
hasDate := false
|
||||
|
|
@ -941,30 +1094,30 @@ byteLoop:
|
|||
}
|
||||
}
|
||||
|
||||
var kind ast.Kind
|
||||
var kind Kind
|
||||
|
||||
if hasTime {
|
||||
if hasDate {
|
||||
if hasTz {
|
||||
kind = ast.DateTime
|
||||
kind = DateTime
|
||||
} else {
|
||||
kind = ast.LocalDateTime
|
||||
kind = LocalDateTime
|
||||
}
|
||||
} else {
|
||||
kind = ast.LocalTime
|
||||
kind = LocalTime
|
||||
}
|
||||
} else {
|
||||
kind = ast.LocalDate
|
||||
kind = LocalDate
|
||||
}
|
||||
|
||||
return p.builder.Push(ast.Node{
|
||||
return p.builder.Push(Node{
|
||||
Kind: kind,
|
||||
Data: b[:i],
|
||||
}), b[i:], nil
|
||||
}
|
||||
|
||||
//nolint:funlen,gocognit,cyclop
|
||||
func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
||||
func (p *Parser) scanIntOrFloat(b []byte) (reference, []byte, error) {
|
||||
i := 0
|
||||
|
||||
if len(b) > 2 && b[0] == '0' && b[1] != '.' && b[1] != 'e' && b[1] != 'E' {
|
||||
|
|
@ -990,8 +1143,8 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||
}
|
||||
}
|
||||
|
||||
return p.builder.Push(ast.Node{
|
||||
Kind: ast.Integer,
|
||||
return p.builder.Push(Node{
|
||||
Kind: Integer,
|
||||
Data: b[:i],
|
||||
}), b[i:], nil
|
||||
}
|
||||
|
|
@ -1013,40 +1166,40 @@ func (p *parser) scanIntOrFloat(b []byte) (ast.Reference, []byte, error) {
|
|||
|
||||
if c == 'i' {
|
||||
if scanFollowsInf(b[i:]) {
|
||||
return p.builder.Push(ast.Node{
|
||||
Kind: ast.Float,
|
||||
return p.builder.Push(Node{
|
||||
Kind: Float,
|
||||
Data: b[:i+3],
|
||||
}), b[i+3:], nil
|
||||
}
|
||||
|
||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
||||
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'i' while scanning for a number")
|
||||
}
|
||||
|
||||
if c == 'n' {
|
||||
if scanFollowsNan(b[i:]) {
|
||||
return p.builder.Push(ast.Node{
|
||||
Kind: ast.Float,
|
||||
return p.builder.Push(Node{
|
||||
Kind: Float,
|
||||
Data: b[:i+3],
|
||||
}), b[i+3:], nil
|
||||
}
|
||||
|
||||
return ast.InvalidReference, nil, newDecodeError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
||||
return invalidReference, nil, NewParserError(b[i:i+1], "unexpected character 'n' while scanning for a number")
|
||||
}
|
||||
|
||||
break
|
||||
}
|
||||
|
||||
if i == 0 {
|
||||
return ast.InvalidReference, b, newDecodeError(b, "incomplete number")
|
||||
return invalidReference, b, NewParserError(b, "incomplete number")
|
||||
}
|
||||
|
||||
kind := ast.Integer
|
||||
kind := Integer
|
||||
|
||||
if isFloat {
|
||||
kind = ast.Float
|
||||
kind = Float
|
||||
}
|
||||
|
||||
return p.builder.Push(ast.Node{
|
||||
return p.builder.Push(Node{
|
||||
Kind: kind,
|
||||
Data: b[:i],
|
||||
}), b[i:], nil
|
||||
|
|
@ -1075,11 +1228,11 @@ func isValidBinaryRune(r byte) bool {
|
|||
|
||||
func expect(x byte, b []byte) ([]byte, error) {
|
||||
if len(b) == 0 {
|
||||
return nil, newDecodeError(b, "expected character %c but the document ended here", x)
|
||||
return nil, NewParserError(b, "expected character %c but the document ended here", x)
|
||||
}
|
||||
|
||||
if b[0] != x {
|
||||
return nil, newDecodeError(b[0:1], "expected character %c", x)
|
||||
return nil, NewParserError(b[0:1], "expected character %c", x)
|
||||
}
|
||||
|
||||
return b[1:], nil
|
||||
|
|
@ -1,4 +1,6 @@
|
|||
package toml
|
||||
package unstable
|
||||
|
||||
import "github.com/pelletier/go-toml/v2/internal/characters"
|
||||
|
||||
func scanFollows(b []byte, pattern string) bool {
|
||||
n := len(pattern)
|
||||
|
|
@ -54,16 +56,16 @@ func scanLiteralString(b []byte) ([]byte, []byte, error) {
|
|||
case '\'':
|
||||
return b[:i+1], b[i+1:], nil
|
||||
case '\n', '\r':
|
||||
return nil, nil, newDecodeError(b[i:i+1], "literal strings cannot have new lines")
|
||||
return nil, nil, NewParserError(b[i:i+1], "literal strings cannot have new lines")
|
||||
}
|
||||
size := utf8ValidNext(b[i:])
|
||||
size := characters.Utf8ValidNext(b[i:])
|
||||
if size == 0 {
|
||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
||||
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||
}
|
||||
i += size
|
||||
}
|
||||
|
||||
return nil, nil, newDecodeError(b[len(b):], "unterminated literal string")
|
||||
return nil, nil, NewParserError(b[len(b):], "unterminated literal string")
|
||||
}
|
||||
|
||||
func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
||||
|
|
@ -98,39 +100,39 @@ func scanMultilineLiteralString(b []byte) ([]byte, []byte, error) {
|
|||
i++
|
||||
|
||||
if i < len(b) && b[i] == '\'' {
|
||||
return nil, nil, newDecodeError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
||||
return nil, nil, NewParserError(b[i-3:i+1], "''' not allowed in multiline literal string")
|
||||
}
|
||||
|
||||
return b[:i], b[i:], nil
|
||||
}
|
||||
case '\r':
|
||||
if len(b) < i+2 {
|
||||
return nil, nil, newDecodeError(b[len(b):], `need a \n after \r`)
|
||||
return nil, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||
}
|
||||
if b[i+1] != '\n' {
|
||||
return nil, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
|
||||
return nil, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||
}
|
||||
i += 2 // skip the \n
|
||||
continue
|
||||
}
|
||||
size := utf8ValidNext(b[i:])
|
||||
size := characters.Utf8ValidNext(b[i:])
|
||||
if size == 0 {
|
||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character")
|
||||
return nil, nil, NewParserError(b[i:i+1], "invalid character")
|
||||
}
|
||||
i += size
|
||||
}
|
||||
|
||||
return nil, nil, newDecodeError(b[len(b):], `multiline literal string not terminated by '''`)
|
||||
return nil, nil, NewParserError(b[len(b):], `multiline literal string not terminated by '''`)
|
||||
}
|
||||
|
||||
func scanWindowsNewline(b []byte) ([]byte, []byte, error) {
|
||||
const lenCRLF = 2
|
||||
if len(b) < lenCRLF {
|
||||
return nil, nil, newDecodeError(b, "windows new line expected")
|
||||
return nil, nil, NewParserError(b, "windows new line expected")
|
||||
}
|
||||
|
||||
if b[1] != '\n' {
|
||||
return nil, nil, newDecodeError(b, `windows new line should be \r\n`)
|
||||
return nil, nil, NewParserError(b, `windows new line should be \r\n`)
|
||||
}
|
||||
|
||||
return b[:lenCRLF], b[lenCRLF:], nil
|
||||
|
|
@ -149,7 +151,6 @@ func scanWhitespace(b []byte) ([]byte, []byte) {
|
|||
return b, b[len(b):]
|
||||
}
|
||||
|
||||
//nolint:unparam
|
||||
func scanComment(b []byte) ([]byte, []byte, error) {
|
||||
// comment-start-symbol = %x23 ; #
|
||||
// non-ascii = %x80-D7FF / %xE000-10FFFF
|
||||
|
|
@ -165,11 +166,11 @@ func scanComment(b []byte) ([]byte, []byte, error) {
|
|||
if i+1 < len(b) && b[i+1] == '\n' {
|
||||
return b[:i+1], b[i+1:], nil
|
||||
}
|
||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
||||
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||
}
|
||||
size := utf8ValidNext(b[i:])
|
||||
size := characters.Utf8ValidNext(b[i:])
|
||||
if size == 0 {
|
||||
return nil, nil, newDecodeError(b[i:i+1], "invalid character in comment")
|
||||
return nil, nil, NewParserError(b[i:i+1], "invalid character in comment")
|
||||
}
|
||||
|
||||
i += size
|
||||
|
|
@ -192,17 +193,17 @@ func scanBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||
case '"':
|
||||
return b[:i+1], escaped, b[i+1:], nil
|
||||
case '\n', '\r':
|
||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "basic strings cannot have new lines")
|
||||
return nil, escaped, nil, NewParserError(b[i:i+1], "basic strings cannot have new lines")
|
||||
case '\\':
|
||||
if len(b) < i+2 {
|
||||
return nil, escaped, nil, newDecodeError(b[i:i+1], "need a character after \\")
|
||||
return nil, escaped, nil, NewParserError(b[i:i+1], "need a character after \\")
|
||||
}
|
||||
escaped = true
|
||||
i++ // skip the next character
|
||||
}
|
||||
}
|
||||
|
||||
return nil, escaped, nil, newDecodeError(b[len(b):], `basic string not terminated by "`)
|
||||
return nil, escaped, nil, NewParserError(b[len(b):], `basic string not terminated by "`)
|
||||
}
|
||||
|
||||
func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
||||
|
|
@ -243,27 +244,27 @@ func scanMultilineBasicString(b []byte) ([]byte, bool, []byte, error) {
|
|||
i++
|
||||
|
||||
if i < len(b) && b[i] == '"' {
|
||||
return nil, escaped, nil, newDecodeError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
||||
return nil, escaped, nil, NewParserError(b[i-3:i+1], `""" not allowed in multiline basic string`)
|
||||
}
|
||||
|
||||
return b[:i], escaped, b[i:], nil
|
||||
}
|
||||
case '\\':
|
||||
if len(b) < i+2 {
|
||||
return nil, escaped, nil, newDecodeError(b[len(b):], "need a character after \\")
|
||||
return nil, escaped, nil, NewParserError(b[len(b):], "need a character after \\")
|
||||
}
|
||||
escaped = true
|
||||
i++ // skip the next character
|
||||
case '\r':
|
||||
if len(b) < i+2 {
|
||||
return nil, escaped, nil, newDecodeError(b[len(b):], `need a \n after \r`)
|
||||
return nil, escaped, nil, NewParserError(b[len(b):], `need a \n after \r`)
|
||||
}
|
||||
if b[i+1] != '\n' {
|
||||
return nil, escaped, nil, newDecodeError(b[i:i+2], `need a \n after \r`)
|
||||
return nil, escaped, nil, NewParserError(b[i:i+2], `need a \n after \r`)
|
||||
}
|
||||
i++ // skip the \n
|
||||
}
|
||||
}
|
||||
|
||||
return nil, escaped, nil, newDecodeError(b[len(b):], `multiline basic string not terminated by """`)
|
||||
return nil, escaped, nil, NewParserError(b[len(b):], `multiline basic string not terminated by """`)
|
||||
}
|
||||
|
|
@ -97,7 +97,7 @@ type Fs interface {
|
|||
// Chown changes the uid and gid of the named file.
|
||||
Chown(name string, uid, gid int) error
|
||||
|
||||
//Chtimes changes the access and modification times of the named file
|
||||
// Chtimes changes the access and modification times of the named file
|
||||
Chtimes(name string, atime time.Time, mtime time.Time) error
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -40,7 +40,6 @@ func (f *BasePathFile) Name() string {
|
|||
func (f *BasePathFile) ReadDir(n int) ([]fs.DirEntry, error) {
|
||||
if rdf, ok := f.File.(fs.ReadDirFile); ok {
|
||||
return rdf.ReadDir(n)
|
||||
|
||||
}
|
||||
return readDirFile{f.File}.ReadDir(n)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -223,7 +223,7 @@ func (u *CopyOnWriteFs) OpenFile(name string, flag int, perm os.FileMode) (File,
|
|||
return nil, err
|
||||
}
|
||||
if isaDir {
|
||||
if err = u.layer.MkdirAll(dir, 0777); err != nil {
|
||||
if err = u.layer.MkdirAll(dir, 0o777); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return u.layer.OpenFile(name, flag, perm)
|
||||
|
|
@ -247,8 +247,9 @@ func (u *CopyOnWriteFs) OpenFile(name string, flag int, perm os.FileMode) (File,
|
|||
|
||||
// This function handles the 9 different possibilities caused
|
||||
// by the union which are the intersection of the following...
|
||||
// layer: doesn't exist, exists as a file, and exists as a directory
|
||||
// base: doesn't exist, exists as a file, and exists as a directory
|
||||
//
|
||||
// layer: doesn't exist, exists as a file, and exists as a directory
|
||||
// base: doesn't exist, exists as a file, and exists as a directory
|
||||
func (u *CopyOnWriteFs) Open(name string) (File, error) {
|
||||
// Since the overlay overrides the base we check that first
|
||||
b, err := u.isBaseFile(name)
|
||||
|
|
@ -322,5 +323,5 @@ func (u *CopyOnWriteFs) MkdirAll(name string, perm os.FileMode) error {
|
|||
}
|
||||
|
||||
func (u *CopyOnWriteFs) Create(name string) (File, error) {
|
||||
return u.OpenFile(name, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0666)
|
||||
return u.OpenFile(name, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0o666)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -141,8 +141,10 @@ func WriteFile(fs Fs, filename string, data []byte, perm os.FileMode) error {
|
|||
// We generate random temporary file names so that there's a good
|
||||
// chance the file doesn't exist yet - keeps the number of tries in
|
||||
// TempFile to a minimum.
|
||||
var randNum uint32
|
||||
var randmu sync.Mutex
|
||||
var (
|
||||
randNum uint32
|
||||
randmu sync.Mutex
|
||||
)
|
||||
|
||||
func reseed() uint32 {
|
||||
return uint32(time.Now().UnixNano() + int64(os.Getpid()))
|
||||
|
|
@ -190,7 +192,7 @@ func TempFile(fs Fs, dir, pattern string) (f File, err error) {
|
|||
nconflict := 0
|
||||
for i := 0; i < 10000; i++ {
|
||||
name := filepath.Join(dir, prefix+nextRandom()+suffix)
|
||||
f, err = fs.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0600)
|
||||
f, err = fs.OpenFile(name, os.O_RDWR|os.O_CREATE|os.O_EXCL, 0o600)
|
||||
if os.IsExist(err) {
|
||||
if nconflict++; nconflict > 10 {
|
||||
randmu.Lock()
|
||||
|
|
@ -214,6 +216,7 @@ func TempFile(fs Fs, dir, pattern string) (f File, err error) {
|
|||
func (a Afero) TempDir(dir, prefix string) (name string, err error) {
|
||||
return TempDir(a.Fs, dir, prefix)
|
||||
}
|
||||
|
||||
func TempDir(fs Fs, dir, prefix string) (name string, err error) {
|
||||
if dir == "" {
|
||||
dir = os.TempDir()
|
||||
|
|
@ -222,7 +225,7 @@ func TempDir(fs Fs, dir, prefix string) (name string, err error) {
|
|||
nconflict := 0
|
||||
for i := 0; i < 10000; i++ {
|
||||
try := filepath.Join(dir, prefix+nextRandom())
|
||||
err = fs.Mkdir(try, 0700)
|
||||
err = fs.Mkdir(try, 0o700)
|
||||
if os.IsExist(err) {
|
||||
if nconflict++; nconflict > 10 {
|
||||
randmu.Lock()
|
||||
|
|
|
|||
|
|
@ -245,7 +245,7 @@ func (f *File) Truncate(size int64) error {
|
|||
defer f.fileData.Unlock()
|
||||
if size > int64(len(f.fileData.data)) {
|
||||
diff := size - int64(len(f.fileData.data))
|
||||
f.fileData.data = append(f.fileData.data, bytes.Repeat([]byte{00}, int(diff))...)
|
||||
f.fileData.data = append(f.fileData.data, bytes.Repeat([]byte{0o0}, int(diff))...)
|
||||
} else {
|
||||
f.fileData.data = f.fileData.data[0:size]
|
||||
}
|
||||
|
|
@ -285,7 +285,7 @@ func (f *File) Write(b []byte) (n int, err error) {
|
|||
tail = f.fileData.data[n+int(cur):]
|
||||
}
|
||||
if diff > 0 {
|
||||
f.fileData.data = append(f.fileData.data, append(bytes.Repeat([]byte{00}, int(diff)), b...)...)
|
||||
f.fileData.data = append(f.fileData.data, append(bytes.Repeat([]byte{0o0}, int(diff)), b...)...)
|
||||
f.fileData.data = append(f.fileData.data, tail...)
|
||||
} else {
|
||||
f.fileData.data = append(f.fileData.data[:cur], b...)
|
||||
|
|
@ -321,16 +321,19 @@ func (s *FileInfo) Name() string {
|
|||
s.Unlock()
|
||||
return name
|
||||
}
|
||||
|
||||
func (s *FileInfo) Mode() os.FileMode {
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
return s.mode
|
||||
}
|
||||
|
||||
func (s *FileInfo) ModTime() time.Time {
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
return s.modtime
|
||||
}
|
||||
|
||||
func (s *FileInfo) IsDir() bool {
|
||||
s.Lock()
|
||||
defer s.Unlock()
|
||||
|
|
|
|||
|
|
@ -15,6 +15,7 @@ package afero
|
|||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
|
@ -43,7 +44,7 @@ func (m *MemMapFs) getData() map[string]*mem.FileData {
|
|||
// Root should always exist, right?
|
||||
// TODO: what about windows?
|
||||
root := mem.CreateDir(FilePathSeparator)
|
||||
mem.SetMode(root, os.ModeDir|0755)
|
||||
mem.SetMode(root, os.ModeDir|0o755)
|
||||
m.data[FilePathSeparator] = root
|
||||
})
|
||||
return m.data
|
||||
|
|
@ -96,12 +97,12 @@ func (m *MemMapFs) registerWithParent(f *mem.FileData, perm os.FileMode) {
|
|||
pdir := filepath.Dir(filepath.Clean(f.Name()))
|
||||
err := m.lockfreeMkdir(pdir, perm)
|
||||
if err != nil {
|
||||
//log.Println("Mkdir error:", err)
|
||||
// log.Println("Mkdir error:", err)
|
||||
return
|
||||
}
|
||||
parent, err = m.lockfreeOpen(pdir)
|
||||
if err != nil {
|
||||
//log.Println("Open after Mkdir error:", err)
|
||||
// log.Println("Open after Mkdir error:", err)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
|
@ -142,6 +143,11 @@ func (m *MemMapFs) Mkdir(name string, perm os.FileMode) error {
|
|||
}
|
||||
|
||||
m.mu.Lock()
|
||||
// Double check that it doesn't exist.
|
||||
if _, ok := m.getData()[name]; ok {
|
||||
m.mu.Unlock()
|
||||
return &os.PathError{Op: "mkdir", Path: name, Err: ErrFileExists}
|
||||
}
|
||||
item := mem.CreateDir(name)
|
||||
mem.SetMode(item, os.ModeDir|perm)
|
||||
m.getData()[name] = item
|
||||
|
|
@ -232,7 +238,7 @@ func (m *MemMapFs) OpenFile(name string, flag int, perm os.FileMode) (File, erro
|
|||
file = mem.NewReadOnlyFileHandle(file.(*mem.File).Data())
|
||||
}
|
||||
if flag&os.O_APPEND > 0 {
|
||||
_, err = file.Seek(0, os.SEEK_END)
|
||||
_, err = file.Seek(0, io.SeekEnd)
|
||||
if err != nil {
|
||||
file.Close()
|
||||
return nil, err
|
||||
|
|
@ -314,6 +320,18 @@ func (m *MemMapFs) Rename(oldname, newname string) error {
|
|||
} else {
|
||||
return &os.PathError{Op: "rename", Path: oldname, Err: ErrFileNotFound}
|
||||
}
|
||||
|
||||
for p, fileData := range m.getData() {
|
||||
if strings.HasPrefix(p, oldname+FilePathSeparator) {
|
||||
m.mu.RUnlock()
|
||||
m.mu.Lock()
|
||||
delete(m.getData(), p)
|
||||
p := strings.Replace(p, oldname, newname, 1)
|
||||
m.getData()[p] = fileData
|
||||
m.mu.Unlock()
|
||||
m.mu.RLock()
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
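The loop added above makes MemMapFs.Rename move a directory's descendants along with the directory itself. A hedged sketch of the resulting behavior, using only afero helpers that exist in this vendored version; the file names are illustrative.

// Hedged sketch: renaming a directory in the in-memory filesystem now carries its children.
package main

import (
	"fmt"

	"github.com/spf13/afero"
)

func main() {
	fs := afero.NewMemMapFs()
	_ = fs.MkdirAll("old/dir", 0o755)
	_ = afero.WriteFile(fs, "old/dir/file.txt", []byte("hi"), 0o644)

	if err := fs.Rename("old", "new"); err != nil {
		fmt.Println("rename failed:", err)
	}

	moved, _ := afero.Exists(fs, "new/dir/file.txt")
	fmt.Println("child moved with parent:", moved) // expected: true
}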
|
||||
|
||||
|
|
|
|||
|
|
@ -10,7 +10,6 @@ import (
|
|||
// The RegexpFs filters files (not directories) by regular expression. Only
|
||||
// files matching the given regexp will be allowed, all others get an ENOENT error (
|
||||
// "No such file or directory").
|
||||
//
|
||||
type RegexpFs struct {
|
||||
re *regexp.Regexp
|
||||
source Fs
|
||||
|
|
|
|||
|
|
@ -21,9 +21,9 @@ import (
|
|||
// filesystems saying so.
|
||||
// It indicates support for 3 symlink related interfaces that implement the
|
||||
// behaviors of the os methods:
|
||||
// - Lstat
|
||||
// - Symlink, and
|
||||
// - Readlink
|
||||
// - Lstat
|
||||
// - Symlink, and
|
||||
// - Readlink
|
||||
type Symlinker interface {
|
||||
Lstater
|
||||
Linker
|
||||
|
|
|
|||
|
|
@ -47,7 +47,7 @@ func (f *UnionFile) Read(s []byte) (int, error) {
|
|||
if (err == nil || err == io.EOF) && f.Base != nil {
|
||||
// advance the file position also in the base file, the next
|
||||
// call may be a write at this position (or a seek with SEEK_CUR)
|
||||
if _, seekErr := f.Base.Seek(int64(n), os.SEEK_CUR); seekErr != nil {
|
||||
if _, seekErr := f.Base.Seek(int64(n), io.SeekCurrent); seekErr != nil {
|
||||
// only overwrite err in case the seek fails: we need to
|
||||
// report an eventual io.EOF to the caller
|
||||
err = seekErr
|
||||
|
|
@ -130,7 +130,7 @@ func (f *UnionFile) Name() string {
|
|||
type DirsMerger func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error)
|
||||
|
||||
var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, error) {
|
||||
var files = make(map[string]os.FileInfo)
|
||||
files := make(map[string]os.FileInfo)
|
||||
|
||||
for _, fi := range lofi {
|
||||
files[fi.Name()] = fi
|
||||
|
|
@ -151,7 +151,6 @@ var defaultUnionMergeDirsFn = func(lofi, bofi []os.FileInfo) ([]os.FileInfo, err
|
|||
}
|
||||
|
||||
return rfi, nil
|
||||
|
||||
}
|
||||
|
||||
// Readdir will weave the two directories together and
|
||||
|
|
@ -275,7 +274,7 @@ func copyFile(base Fs, layer Fs, name string, bfh File) error {
|
|||
return err
|
||||
}
|
||||
if !exists {
|
||||
err = layer.MkdirAll(filepath.Dir(name), 0777) // FIXME?
|
||||
err = layer.MkdirAll(filepath.Dir(name), 0o777) // FIXME?
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -43,7 +43,7 @@ func WriteReader(fs Fs, path string, r io.Reader) (err error) {
|
|||
ospath := filepath.FromSlash(dir)
|
||||
|
||||
if ospath != "" {
|
||||
err = fs.MkdirAll(ospath, 0777) // rwx, rw, r
|
||||
err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r
|
||||
if err != nil {
|
||||
if err != os.ErrExist {
|
||||
return err
|
||||
|
|
@ -71,7 +71,7 @@ func SafeWriteReader(fs Fs, path string, r io.Reader) (err error) {
|
|||
ospath := filepath.FromSlash(dir)
|
||||
|
||||
if ospath != "" {
|
||||
err = fs.MkdirAll(ospath, 0777) // rwx, rw, r
|
||||
err = fs.MkdirAll(ospath, 0o777) // rwx, rw, r
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
|
@ -124,7 +124,7 @@ func GetTempDir(fs Fs, subPath string) string {
|
|||
return addSlash(dir)
|
||||
}
|
||||
|
||||
err := fs.MkdirAll(dir, 0777)
|
||||
err := fs.MkdirAll(dir, 0o777)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
|
|
@ -197,7 +197,6 @@ func FileContainsAnyBytes(fs Fs, filename string, subslices [][]byte) (bool, err
|
|||
|
||||
// readerContains reports whether any of the subslices is within r.
|
||||
func readerContainsAny(r io.Reader, subslices ...[]byte) bool {
|
||||
|
||||
if r == nil || len(subslices) == 0 {
|
||||
return false
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,8 @@
|
|||
cast
|
||||
====
|
||||
[](https://godoc.org/github.com/spf13/cast)
|
||||
[](https://github.com/spf13/cast/actions/workflows/go.yml)
|
||||
# cast
|
||||
|
||||
[](https://github.com/spf13/cast/actions/workflows/ci.yml)
|
||||
[](https://pkg.go.dev/mod/github.com/spf13/cast)
|
||||

|
||||
[](https://goreportcard.com/report/github.com/spf13/cast)
|
||||
|
||||
Easy and safe casting from one type to another in Go
|
||||
|
|
@ -17,7 +18,7 @@ interface into a bool, etc. Cast does this intelligently when an obvious
|
|||
conversion is possible. It doesn’t make any attempts to guess what you meant,
|
||||
for example you can only convert a string to an int when it is a string
|
||||
representation of an int such as “8”. Cast was developed for use in
|
||||
[Hugo](http://hugo.spf13.com), a website engine which uses YAML, TOML or JSON
|
||||
[Hugo](https://gohugo.io), a website engine which uses YAML, TOML or JSON
|
||||
for meta data.
|
||||
|
||||
## Why use Cast?
|
||||
|
|
@ -72,4 +73,3 @@ the code for a complete set.
|
|||
var eight interface{} = 8
|
||||
cast.ToInt(eight) // 8
|
||||
cast.ToInt(nil) // 0
|
||||
|
||||
|
|
|
|||
|
|
@ -98,10 +98,31 @@ func ToBoolE(i interface{}) (bool, error) {
|
|||
case nil:
|
||||
return false, nil
|
||||
case int:
|
||||
if i.(int) != 0 {
|
||||
return true, nil
|
||||
}
|
||||
return false, nil
|
||||
return b != 0, nil
|
||||
case int64:
|
||||
return b != 0, nil
|
||||
case int32:
|
||||
return b != 0, nil
|
||||
case int16:
|
||||
return b != 0, nil
|
||||
case int8:
|
||||
return b != 0, nil
|
||||
case uint:
|
||||
return b != 0, nil
|
||||
case uint64:
|
||||
return b != 0, nil
|
||||
case uint32:
|
||||
return b != 0, nil
|
||||
case uint16:
|
||||
return b != 0, nil
|
||||
case uint8:
|
||||
return b != 0, nil
|
||||
case float64:
|
||||
return b != 0, nil
|
||||
case float32:
|
||||
return b != 0, nil
|
||||
case time.Duration:
|
||||
return b != 0, nil
|
||||
case string:
|
||||
return strconv.ParseBool(i.(string))
|
||||
case json.Number:
|
||||
|
|
@ -1385,6 +1406,8 @@ func (f timeFormat) hasTimezone() bool {
|
|||
|
||||
var (
|
||||
timeFormats = []timeFormat{
|
||||
// Keep common formats at the top.
|
||||
{"2006-01-02", timeFormatNoTimezone},
|
||||
{time.RFC3339, timeFormatNumericTimezone},
|
||||
{"2006-01-02T15:04:05", timeFormatNoTimezone}, // iso8601 without timezone
|
||||
{time.RFC1123Z, timeFormatNumericTimezone},
|
||||
|
|
@ -1400,7 +1423,6 @@ var (
|
|||
{time.UnixDate, timeFormatNamedTimezone},
|
||||
{time.RubyDate, timeFormatNumericTimezone},
|
||||
{"2006-01-02 15:04:05Z07:00", timeFormatNumericTimezone},
|
||||
{"2006-01-02", timeFormatNoTimezone},
|
||||
{"02 Jan 2006", timeFormatNoTimezone},
|
||||
{"2006-01-02 15:04:05 -07:00", timeFormatNumericTimezone},
|
||||
{"2006-01-02 15:04:05 -0700", timeFormatNumericTimezone},
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
# Copyright 2013-2022 The Cobra Authors
|
||||
# Copyright 2013-2023 The Cobra Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
|
|
|
|||
|
|
@ -5,10 +5,6 @@ ifeq (, $(shell which golangci-lint))
|
|||
$(warning "could not find golangci-lint in $(PATH), run: curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh")
|
||||
endif
|
||||
|
||||
ifeq (, $(shell which richgo))
|
||||
$(warning "could not find richgo in $(PATH), run: go install github.com/kyoh86/richgo@latest")
|
||||
endif
|
||||
|
||||
.PHONY: fmt lint test install_deps clean
|
||||
|
||||
default: all
|
||||
|
|
@ -25,6 +21,10 @@ lint:
|
|||
|
||||
test: install_deps
|
||||
$(info ******************** running tests ********************)
|
||||
go test -v ./...
|
||||
|
||||
richtest: install_deps
|
||||
$(info ******************** running tests with kyoh86/richgo ********************)
|
||||
richgo test -v ./...
|
||||
|
||||
install_deps:
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||

|
||||

|
||||
|
||||
Cobra is a library for creating powerful modern CLI applications.
|
||||
|
||||
|
|
@ -6,7 +6,7 @@ Cobra is used in many Go projects such as [Kubernetes](https://kubernetes.io/),
|
|||
[Hugo](https://gohugo.io), and [GitHub CLI](https://github.com/cli/cli) to
|
||||
name a few. [This list](./projects_using_cobra.md) contains a more extensive list of projects using Cobra.
|
||||
|
||||
[](https://github.com/spf13/cobra/actions?query=workflow%3ATest)
|
||||
[](https://github.com/spf13/cobra/actions?query=workflow%3ATest)
|
||||
[](https://pkg.go.dev/github.com/spf13/cobra)
|
||||
[](https://goreportcard.com/report/github.com/spf13/cobra)
|
||||
[](https://gophers.slack.com/archives/CD3LP1199)
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2013-2022 The Cobra Authors
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2013-2022 The Cobra Authors
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
|
@ -21,7 +21,7 @@ import (
|
|||
|
||||
type PositionalArgs func(cmd *Command, args []string) error
|
||||
|
||||
// Legacy arg validation has the following behaviour:
|
||||
// legacyArgs validation has the following behaviour:
|
||||
// - root commands with no subcommands can take arbitrary arguments
|
||||
// - root commands with subcommands will do subcommand validity checking
|
||||
// - subcommands will always accept arbitrary arguments
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2013-2022 The Cobra Authors
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
|
@ -532,7 +532,7 @@ func writeLocalNonPersistentFlag(buf io.StringWriter, flag *pflag.Flag) {
|
|||
}
|
||||
}
|
||||
|
||||
// Setup annotations for go completions for registered flags
|
||||
// prepareCustomAnnotationsForFlags sets up annotations for go completions for registered flags
|
||||
func prepareCustomAnnotationsForFlags(cmd *Command) {
|
||||
flagCompletionMutex.RLock()
|
||||
defer flagCompletionMutex.RUnlock()
|
||||
|
|
|
|||
|
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2013-2022 The Cobra Authors
|
||||
// Copyright 2013-2023 The Cobra Authors
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
|
@ -38,7 +38,7 @@ func genBashComp(buf io.StringWriter, name string, includeDesc bool) {
|
|||
|
||||
__%[1]s_debug()
|
||||
{
|
||||
if [[ -n ${BASH_COMP_DEBUG_FILE:-} ]]; then
|
||||
if [[ -n ${BASH_COMP_DEBUG_FILE-} ]]; then
|
||||
echo "$*" >> "${BASH_COMP_DEBUG_FILE}"
|
||||
fi
|
||||
}
|
||||
|
|
@ -65,7 +65,7 @@ __%[1]s_get_completion_results() {
|
|||
lastChar=${lastParam:$((${#lastParam}-1)):1}
|
||||
__%[1]s_debug "lastParam ${lastParam}, lastChar ${lastChar}"
|
||||
|
||||
if [ -z "${cur}" ] && [ "${lastChar}" != "=" ]; then
|
||||
if [[ -z ${cur} && ${lastChar} != = ]]; then
|
||||
# If the last parameter is complete (there is a space following it)
|
||||
# We add an extra empty parameter so we can indicate this to the go method.
|
||||
__%[1]s_debug "Adding extra empty parameter"
|
||||
|
|
@ -75,7 +75,7 @@ __%[1]s_get_completion_results() {
|
|||
# When completing a flag with an = (e.g., %[1]s -n=<TAB>)
|
||||
# bash focuses on the part after the =, so we need to remove
|
||||
# the flag part from $cur
|
||||
if [[ "${cur}" == -*=* ]]; then
|
||||
if [[ ${cur} == -*=* ]]; then
|
||||
cur="${cur#*=}"
|
||||
fi
|
||||
|
||||
|
|
@ -87,7 +87,7 @@ __%[1]s_get_completion_results() {
|
|||
directive=${out##*:}
|
||||
# Remove the directive
|
||||
out=${out%%:*}
|
||||
if [ "${directive}" = "${out}" ]; then
|
||||
if [[ ${directive} == "${out}" ]]; then
|
||||
# There is no directive specified
|
||||
directive=0
|
||||
fi
|
||||
|
|
@ -101,22 +101,36 @@ __%[1]s_process_completion_results() {
|
|||
local shellCompDirectiveNoFileComp=%[5]d
|
||||
local shellCompDirectiveFilterFileExt=%[6]d
|
||||
local shellCompDirectiveFilterDirs=%[7]d
|
||||
local shellCompDirectiveKeepOrder=%[8]d
|
||||
|
||||
if [ $((directive & shellCompDirectiveError)) -ne 0 ]; then
|
||||
if (((directive & shellCompDirectiveError) != 0)); then
|
||||
# Error code. No completion.
|
||||
__%[1]s_debug "Received error from custom completion go code"
|
||||
return
|
||||
else
|
||||
if [ $((directive & shellCompDirectiveNoSpace)) -ne 0 ]; then
|
||||
if [[ $(type -t compopt) = "builtin" ]]; then
|
||||
if (((directive & shellCompDirectiveNoSpace) != 0)); then
|
||||
if [[ $(type -t compopt) == builtin ]]; then
|
||||
__%[1]s_debug "Activating no space"
|
||||
compopt -o nospace
|
||||
else
|
||||
__%[1]s_debug "No space directive not supported in this version of bash"
|
||||
fi
|
||||
fi
|
||||
if [ $((directive & shellCompDirectiveNoFileComp)) -ne 0 ]; then
|
||||
if [[ $(type -t compopt) = "builtin" ]]; then
|
||||
if (((directive & shellCompDirectiveKeepOrder) != 0)); then
|
||||
if [[ $(type -t compopt) == builtin ]]; then
|
||||
# no sort isn't supported for bash versions older than 4.4
|
||||
if [[ ${BASH_VERSINFO[0]} -lt 4 || ( ${BASH_VERSINFO[0]} -eq 4 && ${BASH_VERSINFO[1]} -lt 4 ) ]]; then
|
||||
__%[1]s_debug "No sort directive not supported in this version of bash"
|
||||
else
|
||||
__%[1]s_debug "Activating keep order"
|
||||
compopt -o nosort
|
||||
fi
|
||||
else
|
||||
__%[1]s_debug "No sort directive not supported in this version of bash"
|
||||
fi
|
||||
fi
|
||||
if (((directive & shellCompDirectiveNoFileComp) != 0)); then
|
||||
if [[ $(type -t compopt) == builtin ]]; then
|
||||
__%[1]s_debug "Activating no file completion"
|
||||
compopt +o default
|
||||
else
|
||||
|
|
@ -130,7 +144,7 @@ __%[1]s_process_completion_results() {
|
|||
local activeHelp=()
|
||||
__%[1]s_extract_activeHelp
|
if [ $((directive & shellCompDirectiveFilterFileExt)) -ne 0 ]; then
if (((directive & shellCompDirectiveFilterFileExt) != 0)); then
# File extension filtering
local fullFilter filter filteringCmd
@@ -143,13 +157,12 @@ __%[1]s_process_completion_results() {
filteringCmd="_filedir $fullFilter"
__%[1]s_debug "File filtering command: $filteringCmd"
$filteringCmd
elif [ $((directive & shellCompDirectiveFilterDirs)) -ne 0 ]; then
elif (((directive & shellCompDirectiveFilterDirs) != 0)); then
# File completion for directories only
# Use printf to strip any trailing newline
local subdir
subdir=$(printf "%%s" "${completions[0]}")
if [ -n "$subdir" ]; then
subdir=${completions[0]}
if [[ -n $subdir ]]; then
__%[1]s_debug "Listing directories in $subdir"
pushd "$subdir" >/dev/null 2>&1 && _filedir -d && popd >/dev/null 2>&1 || return
else
@@ -164,7 +177,7 @@ __%[1]s_process_completion_results() {
__%[1]s_handle_special_char "$cur" =
# Print the activeHelp statements before we finish
if [ ${#activeHelp[*]} -ne 0 ]; then
if ((${#activeHelp[*]} != 0)); then
printf "\n";
printf "%%s\n" "${activeHelp[@]}"
printf "\n"
@@ -184,21 +197,21 @@ __%[1]s_process_completion_results() {
# Separate activeHelp lines from real completions.
# Fills the $activeHelp and $completions arrays.
__%[1]s_extract_activeHelp() {
local activeHelpMarker="%[8]s"
local activeHelpMarker="%[9]s"
local endIndex=${#activeHelpMarker}
while IFS='' read -r comp; do
if [ "${comp:0:endIndex}" = "$activeHelpMarker" ]; then
if [[ ${comp:0:endIndex} == $activeHelpMarker ]]; then
comp=${comp:endIndex}
__%[1]s_debug "ActiveHelp found: $comp"
if [ -n "$comp" ]; then
if [[ -n $comp ]]; then
activeHelp+=("$comp")
fi
else
# Not an activeHelp line but a normal completion
completions+=("$comp")
fi
done < <(printf "%%s\n" "${out}")
done <<<"${out}"
}
__%[1]s_handle_completion_types() {
@@ -254,7 +267,7 @@ __%[1]s_handle_standard_completion_case() {
done < <(printf "%%s\n" "${completions[@]}")
# If there is a single completion left, remove the description text
if [ ${#COMPREPLY[*]} -eq 1 ]; then
if ((${#COMPREPLY[*]} == 1)); then
__%[1]s_debug "COMPREPLY[0]: ${COMPREPLY[0]}"
comp="${COMPREPLY[0]%%%%$tab*}"
__%[1]s_debug "Removed description from single completion, which is now: ${comp}"
@@ -271,8 +284,8 @@ __%[1]s_handle_special_char()
if [[ "$comp" == *${char}* && "$COMP_WORDBREAKS" == *${char}* ]]; then
local word=${comp%%"${comp##*${char}}"}
local idx=${#COMPREPLY[*]}
while [[ $((--idx)) -ge 0 ]]; do
COMPREPLY[$idx]=${COMPREPLY[$idx]#"$word"}
while ((--idx >= 0)); do
COMPREPLY[idx]=${COMPREPLY[idx]#"$word"}
done
fi
}
@@ -298,7 +311,7 @@ __%[1]s_format_comp_descriptions()
# Make sure we can fit a description of at least 8 characters
# if we are to align the descriptions.
if [[ $maxdesclength -gt 8 ]]; then
if ((maxdesclength > 8)); then
# Add the proper number of spaces to align the descriptions
for ((i = ${#comp} ; i < longest ; i++)); do
comp+=" "
@@ -310,8 +323,8 @@ __%[1]s_format_comp_descriptions()
# If there is enough space for any description text,
# truncate the descriptions that are too long for the shell width
if [ $maxdesclength -gt 0 ]; then
if [ ${#desc} -gt $maxdesclength ]; then
if ((maxdesclength > 0)); then
if ((${#desc} > maxdesclength)); then
desc=${desc:0:$(( maxdesclength - 1 ))}
desc+="…"
fi
@@ -332,9 +345,9 @@ __start_%[1]s()
# Call _init_completion from the bash-completion package
# to prepare the arguments properly
if declare -F _init_completion >/dev/null 2>&1; then
_init_completion -n "=:" || return
_init_completion -n =: || return
else
__%[1]s_init_completion -n "=:" || return
__%[1]s_init_completion -n =: || return
fi
__%[1]s_debug
@@ -361,7 +374,7 @@ fi
# ex: ts=4 sw=4 et filetype=sh
`, name, compCmd,
ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs,
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder,
activeHelpMarker))
}
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -167,8 +167,8 @@ func appendIfNotPresent(s, stringToAppend string) string {
// rpad adds padding to the right of a string.
func rpad(s string, padding int) string {
template := fmt.Sprintf("%%-%ds", padding)
return fmt.Sprintf(template, s)
formattedString := fmt.Sprintf("%%-%ds", padding)
return fmt.Sprintf(formattedString, s)
}
// tmpl executes the given template text on data, writing the result to w.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -35,7 +35,7 @@ const FlagSetByCobraAnnotation = "cobra_annotation_flag_set_by_cobra"
// FParseErrWhitelist configures Flag parse errors to be ignored
type FParseErrWhitelist flag.ParseErrorsWhitelist
// Structure to manage groups for commands
// Group Structure to manage groups for commands
type Group struct {
ID string
Title string
@@ -47,7 +47,7 @@ type Group struct {
// definition to ensure usability.
type Command struct {
// Use is the one-line usage message.
// Recommended syntax is as follow:
// Recommended syntax is as follows:
// [ ] identifies an optional argument. Arguments that are not enclosed in brackets are required.
// ... indicates that you can specify multiple values for the previous argument.
// | indicates mutually exclusive information. You can use the argument to the left of the separator or the
@@ -321,7 +321,7 @@ func (c *Command) SetHelpCommand(cmd *Command) {
c.helpCommand = cmd
}
// SetHelpCommandGroup sets the group id of the help command.
// SetHelpCommandGroupID sets the group id of the help command.
func (c *Command) SetHelpCommandGroupID(groupID string) {
if c.helpCommand != nil {
c.helpCommand.GroupID = groupID
@@ -330,7 +330,7 @@ func (c *Command) SetHelpCommandGroupID(groupID string) {
c.helpCommandGroupID = groupID
}
// SetCompletionCommandGroup sets the group id of the completion command.
// SetCompletionCommandGroupID sets the group id of the completion command.
func (c *Command) SetCompletionCommandGroupID(groupID string) {
// completionCommandGroupID is used if no completion command is defined by the user
c.Root().completionCommandGroupID = groupID
@@ -655,20 +655,44 @@ Loop:
// argsMinusFirstX removes only the first x from args. Otherwise, commands that look like
// openshift admin policy add-role-to-user admin my-user, lose the admin argument (arg[4]).
func argsMinusFirstX(args []string, x string) []string {
for i, y := range args {
if x == y {
ret := []string{}
ret = append(ret, args[:i]...)
ret = append(ret, args[i+1:]...)
return ret
// Special care needs to be taken not to remove a flag value.
func (c *Command) argsMinusFirstX(args []string, x string) []string {
if len(args) == 0 {
return args
}
c.mergePersistentFlags()
flags := c.Flags()
Loop:
for pos := 0; pos < len(args); pos++ {
s := args[pos]
switch {
case s == "--":
// -- means we have reached the end of the parseable args. Break out of the loop now.
break Loop
case strings.HasPrefix(s, "--") && !strings.Contains(s, "=") && !hasNoOptDefVal(s[2:], flags):
fallthrough
case strings.HasPrefix(s, "-") && !strings.Contains(s, "=") && len(s) == 2 && !shortHasNoOptDefVal(s[1:], flags):
// This is a flag without a default value, and an equal sign is not used. Increment pos in order to skip
// over the next arg, because that is the value of this flag.
pos++
continue
case !strings.HasPrefix(s, "-"):
// This is not a flag or a flag value. Check to see if it matches what we're looking for, and if so,
// return the args, excluding the one at this position.
if s == x {
ret := []string{}
ret = append(ret, args[:pos]...)
ret = append(ret, args[pos+1:]...)
return ret
}
}
}
return args
}
func isFlagArg(arg string) bool {
return ((len(arg) >= 3 && arg[1] == '-') ||
return ((len(arg) >= 3 && arg[0:2] == "--") ||
(len(arg) >= 2 && arg[0] == '-' && arg[1] != '-'))
}
@@ -686,7 +710,7 @@ func (c *Command) Find(args []string) (*Command, []string, error) {
cmd := c.findNext(nextSubCmd)
if cmd != nil {
return innerfind(cmd, argsMinusFirstX(innerArgs, nextSubCmd))
return innerfind(cmd, c.argsMinusFirstX(innerArgs, nextSubCmd))
}
return c, innerArgs
}
@@ -998,6 +1022,10 @@ func (c *Command) ExecuteC() (cmd *Command, err error) {
// initialize completion at the last point to allow for user overriding
c.InitDefaultCompletionCmd()
// Now that all commands have been created, let's make sure all groups
// are properly created also
c.checkCommandGroups()
args := c.args
// Workaround FAIL with "go test -v" or "cobra.test -test.v", see #155
@@ -1092,6 +1120,19 @@ func (c *Command) ValidateRequiredFlags() error {
return nil
}
// checkCommandGroups checks if a command has been added to a group that does not exists.
// If so, we panic because it indicates a coding error that should be corrected.
func (c *Command) checkCommandGroups() {
for _, sub := range c.commands {
// if Group is not defined let the developer know right away
if sub.GroupID != "" && !c.ContainsGroup(sub.GroupID) {
panic(fmt.Sprintf("group id '%s' is not defined for subcommand '%s'", sub.GroupID, sub.CommandPath()))
}
sub.checkCommandGroups()
}
}
// InitDefaultHelpFlag adds default help flag to c.
// It is called automatically by executing the c or by calling help and usage.
// If c already has help flag, it will do nothing.
@@ -1218,10 +1259,6 @@ func (c *Command) AddCommand(cmds ...*Command) {
panic("Command can't be a child of itself")
}
cmds[i].parent = c
// if Group is not defined let the developer know right away
if x.GroupID != "" && !c.ContainsGroup(x.GroupID) {
panic(fmt.Sprintf("Group id '%s' is not defined for subcommand '%s'", x.GroupID, cmds[i].CommandPath()))
}
// update max lengths
usageLen := len(x.Use)
if usageLen > c.commandsMaxUseLen {
@@ -1259,7 +1296,7 @@ func (c *Command) AllChildCommandsHaveGroup() bool {
return true
}
// ContainGroups return if groupID exists in the list of command groups.
// ContainsGroup return if groupID exists in the list of command groups.
func (c *Command) ContainsGroup(groupID string) bool {
for _, x := range c.commandgroups {
if x.ID == groupID {
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -77,6 +77,10 @@
// obtain the same behavior but only for flags.
ShellCompDirectiveFilterDirs
// ShellCompDirectiveKeepOrder indicates that the shell should preserve the order
// in which the completions are provided
ShellCompDirectiveKeepOrder
// ===========================================================================
// All directives using iota should be above this one.
@@ -159,6 +163,9 @@ func (d ShellCompDirective) string() string {
if d&ShellCompDirectiveFilterDirs != 0 {
directives = append(directives, "ShellCompDirectiveFilterDirs")
}
if d&ShellCompDirectiveKeepOrder != 0 {
directives = append(directives, "ShellCompDirectiveKeepOrder")
}
if len(directives) == 0 {
directives = append(directives, "ShellCompDirectiveDefault")
}
@@ -169,7 +176,7 @@ func (d ShellCompDirective) string() string {
return strings.Join(directives, ", ")
}
// Adds a special hidden command that can be used to request custom completions.
// initCompleteCmd adds a special hidden command that can be used to request custom completions.
func (c *Command) initCompleteCmd(args []string) {
completeCmd := &Command{
Use: fmt.Sprintf("%s [command-line]", ShellCompRequestCmd),
@@ -727,7 +734,7 @@ to enable it. You can execute the following once:
To load completions in your current shell session:
source <(%[1]s completion zsh); compdef _%[1]s %[1]s
source <(%[1]s completion zsh)
To load completions for every new session, execute once:
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -48,7 +48,7 @@ func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error
return nil
}
// linkHandler for default ReST hyperlink markup
// defaultLinkHandler for default ReST hyperlink markup
func defaultLinkHandler(name, ref string) string {
return fmt.Sprintf("`%s <%s.rst>`_", name, ref)
}
@@ -169,7 +169,7 @@ func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string
return nil
}
// adapted from: https://github.com/kr/text/blob/main/indent.go
// indentString adapted from: https://github.com/kr/text/blob/main/indent.go
func indentString(s, p string) string {
var res []byte
b := []byte(s)
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -53,7 +53,7 @@ function __%[1]s_perform_completion
__%[1]s_debug "last arg: $lastArg"
# Disable ActiveHelp which is not supported for fish shell
set -l requestComp "%[9]s=0 $args[1] %[3]s $args[2..-1] $lastArg"
set -l requestComp "%[10]s=0 $args[1] %[3]s $args[2..-1] $lastArg"
__%[1]s_debug "Calling $requestComp"
set -l results (eval $requestComp 2> /dev/null)
@@ -89,6 +89,60 @@ function __%[1]s_perform_completion
printf "%%s\n" "$directiveLine"
end
# this function limits calls to __%[1]s_perform_completion, by caching the result behind $__%[1]s_perform_completion_once_result
function __%[1]s_perform_completion_once
__%[1]s_debug "Starting __%[1]s_perform_completion_once"
if test -n "$__%[1]s_perform_completion_once_result"
__%[1]s_debug "Seems like a valid result already exists, skipping __%[1]s_perform_completion"
return 0
end
set --global __%[1]s_perform_completion_once_result (__%[1]s_perform_completion)
if test -z "$__%[1]s_perform_completion_once_result"
__%[1]s_debug "No completions, probably due to a failure"
return 1
end
__%[1]s_debug "Performed completions and set __%[1]s_perform_completion_once_result"
return 0
end
# this function is used to clear the $__%[1]s_perform_completion_once_result variable after completions are run
function __%[1]s_clear_perform_completion_once_result
__%[1]s_debug ""
__%[1]s_debug "========= clearing previously set __%[1]s_perform_completion_once_result variable =========="
set --erase __%[1]s_perform_completion_once_result
__%[1]s_debug "Succesfully erased the variable __%[1]s_perform_completion_once_result"
end
function __%[1]s_requires_order_preservation
__%[1]s_debug ""
__%[1]s_debug "========= checking if order preservation is required =========="
__%[1]s_perform_completion_once
if test -z "$__%[1]s_perform_completion_once_result"
__%[1]s_debug "Error determining if order preservation is required"
return 1
end
set -l directive (string sub --start 2 $__%[1]s_perform_completion_once_result[-1])
__%[1]s_debug "Directive is: $directive"
set -l shellCompDirectiveKeepOrder %[9]d
set -l keeporder (math (math --scale 0 $directive / $shellCompDirectiveKeepOrder) %% 2)
__%[1]s_debug "Keeporder is: $keeporder"
if test $keeporder -ne 0
__%[1]s_debug "This does require order preservation"
return 0
end
__%[1]s_debug "This doesn't require order preservation"
return 1
end
# This function does two things:
# - Obtain the completions and store them in the global __%[1]s_comp_results
# - Return false if file completion should be performed
@@ -99,17 +153,17 @@ function __%[1]s_prepare_completions
# Start fresh
set --erase __%[1]s_comp_results
set -l results (__%[1]s_perform_completion)
__%[1]s_debug "Completion results: $results"
__%[1]s_perform_completion_once
__%[1]s_debug "Completion results: $__%[1]s_perform_completion_once_result"
if test -z "$results"
if test -z "$__%[1]s_perform_completion_once_result"
__%[1]s_debug "No completion, probably due to a failure"
# Might as well do file completion, in case it helps
return 1
end
set -l directive (string sub --start 2 $results[-1])
set --global __%[1]s_comp_results $results[1..-2]
set -l directive (string sub --start 2 $__%[1]s_perform_completion_once_result[-1])
set --global __%[1]s_comp_results $__%[1]s_perform_completion_once_result[1..-2]
__%[1]s_debug "Completions are: $__%[1]s_comp_results"
__%[1]s_debug "Directive is: $directive"
@@ -205,13 +259,17 @@ end
# Remove any pre-existing completions for the program since we will be handling all of them.
complete -c %[2]s -e
# this will get called after the two calls below and clear the $__%[1]s_perform_completion_once_result global
complete -c %[2]s -n '__%[1]s_clear_perform_completion_once_result'
# The call to __%[1]s_prepare_completions will setup __%[1]s_comp_results
# which provides the program's completion choices.
complete -c %[2]s -n '__%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results'
# If this doesn't require order preservation, we don't use the -k flag
complete -c %[2]s -n 'not __%[1]s_requires_order_preservation && __%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results'
# otherwise we use the -k flag
complete -k -c %[2]s -n '__%[1]s_requires_order_preservation && __%[1]s_prepare_completions' -f -a '$__%[1]s_comp_results'
`, nameForVar, name, compCmd,
ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, activeHelpEnvVar(name)))
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpEnvVar(name)))
}
// GenFishCompletion generates fish completion file and writes to the passed writer.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -77,6 +77,7 @@ filter __%[1]s_escapeStringWithSpecialChars {
$ShellCompDirectiveNoFileComp=%[6]d
$ShellCompDirectiveFilterFileExt=%[7]d
$ShellCompDirectiveFilterDirs=%[8]d
$ShellCompDirectiveKeepOrder=%[9]d
# Prepare the command to request completions for the program.
# Split the command at the first space to separate the program and arguments.
@@ -106,13 +107,22 @@ filter __%[1]s_escapeStringWithSpecialChars {
# If the last parameter is complete (there is a space following it)
# We add an extra empty parameter so we can indicate this to the go method.
__%[1]s_debug "Adding extra empty parameter"
`+" # We need to use `\"`\" to pass an empty argument a \"\" or '' does not work!!!"+`
`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+`
# PowerShell 7.2+ changed the way how the arguments are passed to executables,
# so for pre-7.2 or when Legacy argument passing is enabled we need to use
`+" # `\"`\" to pass an empty argument, a \"\" or '' does not work!!!"+`
if ($PSVersionTable.PsVersion -lt [version]'7.2.0' -or
($PSVersionTable.PsVersion -lt [version]'7.3.0' -and -not [ExperimentalFeature]::IsEnabled("PSNativeCommandArgumentPassing")) -or
(($PSVersionTable.PsVersion -ge [version]'7.3.0' -or [ExperimentalFeature]::IsEnabled("PSNativeCommandArgumentPassing")) -and
$PSNativeCommandArgumentPassing -eq 'Legacy')) {
`+" $RequestComp=\"$RequestComp\" + ' `\"`\"'"+`
} else {
$RequestComp="$RequestComp" + ' ""'
}
}
__%[1]s_debug "Calling $RequestComp"
# First disable ActiveHelp which is not supported for Powershell
$env:%[9]s=0
$env:%[10]s=0
#call the command store the output in $out and redirect stderr and stdout to null
# $Out is an array contains each line per element
@@ -137,7 +147,7 @@ filter __%[1]s_escapeStringWithSpecialChars {
}
$Longest = 0
$Values = $Out | ForEach-Object {
[Array]$Values = $Out | ForEach-Object {
#Split the output in name and description
`+" $Name, $Description = $_.Split(\"`t\",2)"+`
__%[1]s_debug "Name: $Name Description: $Description"
@@ -182,6 +192,11 @@ filter __%[1]s_escapeStringWithSpecialChars {
}
}
# we sort the values in ascending order by name if keep order isn't passed
if (($Directive -band $ShellCompDirectiveKeepOrder) -eq 0 ) {
$Values = $Values | Sort-Object -Property Name
}
if (($Directive -band $ShellCompDirectiveNoFileComp) -ne 0 ) {
__%[1]s_debug "ShellCompDirectiveNoFileComp is called"
@@ -267,7 +282,7 @@ filter __%[1]s_escapeStringWithSpecialChars {
Register-ArgumentCompleter -CommandName '%[1]s' -ScriptBlock $__%[2]sCompleterBlock
`, name, nameForVar, compCmd,
ShellCompDirectiveError, ShellCompDirectiveNoSpace, ShellCompDirectiveNoFileComp,
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, activeHelpEnvVar(name)))
ShellCompDirectiveFilterFileExt, ShellCompDirectiveFilterDirs, ShellCompDirectiveKeepOrder, activeHelpEnvVar(name)))
}
func (c *Command) genPowerShellCompletion(w io.Writer, includeDesc bool) error {
@@ -1,11 +1,13 @@
## Projects using Cobra
- [Allero](https://github.com/allero-io/allero)
- [Arewefastyet](https://benchmark.vitess.io)
- [Arduino CLI](https://github.com/arduino/arduino-cli)
- [Bleve](https://blevesearch.com/)
- [Cilium](https://cilium.io/)
- [CloudQuery](https://github.com/cloudquery/cloudquery)
- [CockroachDB](https://www.cockroachlabs.com/)
- [Constellation](https://github.com/edgelesssys/constellation)
- [Cosmos SDK](https://github.com/cosmos/cosmos-sdk)
- [Datree](https://github.com/datreeio/datree)
- [Delve](https://github.com/derekparker/delve)
@@ -25,7 +27,7 @@
- [Istio](https://istio.io)
- [Kool](https://github.com/kool-dev/kool)
- [Kubernetes](https://kubernetes.io/)
- [Kubescape](https://github.com/armosec/kubescape)
- [Kubescape](https://github.com/kubescape/kubescape)
- [KubeVirt](https://github.com/kubevirt/kubevirt)
- [Linkerd](https://linkerd.io/)
- [Mattermost-server](https://github.com/mattermost/mattermost-server)
@@ -51,10 +53,12 @@
- [Random](https://github.com/erdaltsksn/random)
- [Rclone](https://rclone.org/)
- [Scaleway CLI](https://github.com/scaleway/scaleway-cli)
- [Sia](https://github.com/SiaFoundation/siad)
- [Skaffold](https://skaffold.dev/)
- [Tendermint](https://github.com/tendermint/tendermint)
- [Twitch CLI](https://github.com/twitchdev/twitch-cli)
- [UpCloud CLI (`upctl`)](https://github.com/UpCloudLtd/upcloud-cli)
- [Vitess](https://vitess.io)
- VMware's [Tanzu Community Edition](https://github.com/vmware-tanzu/community-edition) & [Tanzu Framework](https://github.com/vmware-tanzu/tanzu-framework)
- [Werf](https://werf.io/)
- [ZITADEL](https://github.com/zitadel/zitadel)
@@ -1,4 +1,4 @@
// Copyright 2013-2022 The Cobra Authors
// Copyright 2013-2023 The Cobra Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -71,7 +71,7 @@ PowerShell:
`,cmd.Root().Name()),
DisableFlagsInUseLine: true,
ValidArgs: []string{"bash", "zsh", "fish", "powershell"},
Args: cobra.ExactValidArgs(1),
Args: cobra.MatchAll(cobra.ExactArgs(1), cobra.OnlyValidArgs),
Run: func(cmd *cobra.Command, args []string) {
switch args[0] {
case "bash":
@@ -162,16 +162,7 @@ cmd := &cobra.Command{
}
```
The aliases are not shown to the user on tab completion, but they are accepted as valid nouns by
the completion algorithm if entered manually, e.g. in:
```bash
$ kubectl get rc [tab][tab]
backend frontend database
```
Note that without declaring `rc` as an alias, the completion algorithm would not know to show the list of
replication controllers following `rc`.
The aliases are shown to the user on tab completion only if no completions were found within sub-commands or `ValidArgs`.
### Dynamic completion of nouns
@@ -237,6 +228,10 @@ ShellCompDirectiveFilterFileExt
// return []string{"themes"}, ShellCompDirectiveFilterDirs
//
ShellCompDirectiveFilterDirs
// ShellCompDirectiveKeepOrder indicates that the shell should preserve the order
// in which the completions are provided
ShellCompDirectiveKeepOrder
```
***Note***: When using the `ValidArgsFunction`, Cobra will call your registered function after having parsed all flags and arguments provided in the command-line. You therefore don't need to do this parsing yourself. For example, when a user calls `helm status --namespace my-rook-ns [tab][tab]`, Cobra will call your registered `ValidArgsFunction` after having parsed the `--namespace` flag, as it would have done when calling the `RunE` function.
@@ -385,6 +380,19 @@ or
```go
ValidArgs: []string{"bash\tCompletions for bash", "zsh\tCompletions for zsh"}
```
If you don't want to show descriptions in the completions, you can add `--no-descriptions` to the default `completion` command to disable them, like:
```bash
$ source <(helm completion bash)
$ helm completion [tab][tab]
bash (generate autocompletion script for bash) powershell (generate autocompletion script for powershell)
fish (generate autocompletion script for fish) zsh (generate autocompletion script for zsh)
$ source <(helm completion bash --no-descriptions)
$ helm completion [tab][tab]
bash fish powershell zsh
```
## Bash completions
### Dependencies