mirror of https://github.com/knative/client.git
Update dependencies, pin k8s deps. (#855)
This commit is contained in:
parent 4b3b971ae5
commit 4ba1cca91e

go.mod (35 changed lines)

@@ -1,28 +1,22 @@
module knative.dev/client

require (
contrib.go.opencensus.io/exporter/ocagent v0.6.0 // indirect
contrib.go.opencensus.io/exporter/prometheus v0.1.0 // indirect
contrib.go.opencensus.io/exporter/stackdriver v0.13.1 // indirect
github.com/google/go-containerregistry v0.0.0-20200413145205-82d30a103c0a // indirect
github.com/mitchellh/go-homedir v1.1.0
github.com/openzipkin/zipkin-go v0.2.2 // indirect
github.com/robfig/cron v1.2.0 // indirect
github.com/spf13/cobra v0.0.5
github.com/spf13/cobra v0.0.6
github.com/spf13/pflag v1.0.5
github.com/spf13/viper v1.4.0
golang.org/x/crypto v0.0.0-20200220183623-bac4c82f6975
gomodules.xyz/jsonpatch/v2 v2.1.0 // indirect
github.com/spf13/viper v1.6.2
golang.org/x/crypto v0.0.0-20200302210943-78000ba7a073
gotest.tools v2.2.0+incompatible
k8s.io/api v0.17.4
k8s.io/apimachinery v0.17.4
k8s.io/cli-runtime v0.17.0
k8s.io/client-go v0.17.4
knative.dev/eventing v0.14.1
knative.dev/pkg v0.0.0-20200414233146-0eed424fa4ee
knative.dev/serving v0.14.0
knative.dev/test-infra v0.0.0-20200413202711-9cf64fb1b912
sigs.k8s.io/yaml v1.1.0
k8s.io/cli-runtime v0.17.4
k8s.io/client-go v11.0.1-0.20190805182717-6502b5e7b1b5+incompatible
knative.dev/eventing v0.14.1-0.20200523184044-78d7fbb41f8a
knative.dev/pkg v0.0.0-20200522212244-870993f63e81
knative.dev/serving v0.14.1-0.20200524222346-2b805814b468
knative.dev/test-infra v0.0.0-20200522180958-6a0a9b9d893a
sigs.k8s.io/yaml v1.2.0
)

// Temporary pinning certain libraries. Please check periodically, whether these are still needed

@@ -30,6 +24,13 @@ require (

// Fix for `[` in help messages and shell completion code
// See https://github.com/spf13/cobra/pull/899
replace github.com/spf13/cobra => github.com/chmouel/cobra v0.0.0-20191021105835-a78788917390
replace (
github.com/spf13/cobra => github.com/chmouel/cobra v0.0.0-20191021105835-a78788917390

k8s.io/api => k8s.io/api v0.17.4
k8s.io/apimachinery => k8s.io/apimachinery v0.17.4
k8s.io/cli-runtime => k8s.io/cli-runtime v0.17.4
k8s.io/client-go => k8s.io/client-go v0.17.4
)

go 1.13

@@ -161,7 +161,7 @@ func WriteImage(dw printers.PrefixWriter, revision *servingv1.Revision) {
// Check if the user image is likely a more user-friendly description
pinnedDesc := "at"
userImage := clientserving.UserImage(&revision.ObjectMeta)
imageDigest := revision.Status.ImageDigest
imageDigest := revision.Status.DeprecatedImageDigest
if userImage != "" && imageDigest != "" {
var parts []string
if strings.Contains(image, "@") {

@@ -164,7 +164,7 @@ func createTestRevision(revision string, gen int64) servingv1.Revision {
},
},
Status: servingv1.RevisionStatus{
ImageDigest: "gcr.io/test/image@" + imageDigest,
DeprecatedImageDigest: "gcr.io/test/image@" + imageDigest,
Status: duckv1.Status{
Conditions: goodConditions(),
},

@@ -399,7 +399,7 @@ func TestServiceDescribeUserImageVsImage(t *testing.T) {
rev3.Annotations[client_serving.UserImageAnnotationKey] = "gcr.io/test/image:latest"
rev3.Spec.Containers[0].Image = "gcr.io/a/b"
// rev4 is without the annotation at all and no hash
rev4.Status.ImageDigest = ""
rev4.Status.DeprecatedImageDigest = ""
rev4.Spec.Containers[0].Image = "gcr.io/x/y"

// Fetch the revisions

@@ -745,7 +745,7 @@ func createTestRevision(revision string, gen int64, conditions duckv1.Conditions
},
},
Status: servingv1.RevisionStatus{
ImageDigest: "gcr.io/test/image@" + imageDigest,
DeprecatedImageDigest: "gcr.io/test/image@" + imageDigest,
Status: duckv1.Status{
Conditions: conditions,
},

@@ -88,7 +88,7 @@ func fakeServiceUpdate(original *servingv1.Service, args []string) (
rev.Spec = original.Spec.Template.Spec
rev.ObjectMeta = original.Spec.Template.ObjectMeta
rev.Name = original.Status.LatestCreatedRevisionName
rev.Status.ImageDigest = exampleImageByDigest
rev.Status.DeprecatedImageDigest = exampleImageByDigest
return true, rev, nil
})

@@ -225,7 +225,10 @@ func UpdateRevisionTemplateAnnotation(template *servingv1.RevisionTemplateSpec,
// without changing the existing spec
in := make(map[string]string)
in[annotation] = value
if err := autoscaling.ValidateAnnotations(in); err != nil {
// The boolean indicates whether or not the init-scale annotation can be set to 0.
// Since we don't have the config handy, err towards allowing it. The API will
// correctly fail the request if it's forbidden.
if err := autoscaling.ValidateAnnotations(true, in); err != nil {
return err
}

@@ -302,8 +305,8 @@ func FreezeImageToDigest(template *servingv1.RevisionTemplateSpec, baseRevision
return fmt.Errorf("could not freeze image to digest since current revision contains unexpected image")
}

if baseRevision.Status.ImageDigest != "" {
return UpdateImage(template, baseRevision.Status.ImageDigest)
if baseRevision.Status.DeprecatedImageDigest != "" {
return UpdateImage(template, baseRevision.Status.DeprecatedImageDigest)
}
return nil
}

@@ -125,7 +125,7 @@ func TestFreezeImageToDigest(t *testing.T) {
revision := &servingv1.Revision{}
revision.Spec = template.Spec
revision.ObjectMeta = template.ObjectMeta
revision.Status.ImageDigest = "gcr.io/foo/bar@sha256:deadbeef"
revision.Status.DeprecatedImageDigest = "gcr.io/foo/bar@sha256:deadbeef"
container.Image = "gcr.io/foo/bar:latest"
FreezeImageToDigest(template, revision)
assert.Equal(t, container.Image, "gcr.io/foo/bar@sha256:deadbeef")

@@ -61,25 +61,14 @@ var (
instID = &cachedValue{k: "instance/id", trim: true}
)

var (
defaultClient = &Client{hc: &http.Client{
Transport: &http.Transport{
Dial: (&net.Dialer{
Timeout: 2 * time.Second,
KeepAlive: 30 * time.Second,
}).Dial,
ResponseHeaderTimeout: 2 * time.Second,
},
}}
subscribeClient = &Client{hc: &http.Client{
Transport: &http.Transport{
Dial: (&net.Dialer{
Timeout: 2 * time.Second,
KeepAlive: 30 * time.Second,
}).Dial,
},
}}
)
var defaultClient = &Client{hc: &http.Client{
Transport: &http.Transport{
Dial: (&net.Dialer{
Timeout: 2 * time.Second,
KeepAlive: 30 * time.Second,
}).Dial,
},
}}

// NotDefinedError is returned when requested metadata is not defined.
//

@@ -206,10 +195,9 @@ func systemInfoSuggestsGCE() bool {
return name == "Google" || name == "Google Compute Engine"
}

// Subscribe calls Client.Subscribe on a client designed for subscribing (one with no
// ResponseHeaderTimeout).
// Subscribe calls Client.Subscribe on the default client.
func Subscribe(suffix string, fn func(v string, ok bool) error) error {
return subscribeClient.Subscribe(suffix, fn)
return defaultClient.Subscribe(suffix, fn)
}

// Get calls Client.Get on the default client.

@@ -280,9 +268,14 @@ type Client struct {
hc *http.Client
}

// NewClient returns a Client that can be used to fetch metadata. All HTTP requests
// will use the given http.Client instead of the default client.
// NewClient returns a Client that can be used to fetch metadata.
// Returns the client that uses the specified http.Client for HTTP requests.
// If nil is specified, returns the default client.
func NewClient(c *http.Client) *Client {
if c == nil {
return defaultClient
}

return &Client{hc: c}
}
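
A minimal usage sketch of the nil fallback introduced in NewClient above. The cloud.google.com/go/compute/metadata import path is assumed here, since the file path is not visible in this extract:

package main

import (
	"fmt"
	"net/http"
	"time"

	"cloud.google.com/go/compute/metadata"
)

func main() {
	// Passing nil falls back to the package-level default client.
	def := metadata.NewClient(nil)

	// Passing a custom http.Client wraps that client instead,
	// for example to set an overall request timeout.
	custom := metadata.NewClient(&http.Client{Timeout: 5 * time.Second})

	fmt.Println(def == custom) // false: two distinct clients
}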

@@ -304,7 +297,10 @@ func (c *Client) getETag(suffix string) (value, etag string, err error) {
host = metadataIP
}
u := "http://" + host + "/computeMetadata/v1/" + suffix
req, _ := http.NewRequest("GET", u, nil)
req, err := http.NewRequest("GET", u, nil)
if err != nil {
return "", "", err
}
req.Header.Set("Metadata-Flavor", "Google")
req.Header.Set("User-Agent", userAgent)
res, err := c.hc.Do(req)

@@ -407,11 +403,7 @@ func (c *Client) InstanceTags() ([]string, error) {

// InstanceName returns the current VM's instance ID string.
func (c *Client) InstanceName() (string, error) {
host, err := c.Hostname()
if err != nil {
return "", err
}
return strings.Split(host, ".")[0], nil
return c.getTrimmed("instance/name")
}

// Zone returns the current VM's zone, such as "us-central1-b".

@@ -21,3 +21,8 @@ linters:
- lll
- gochecknoinits
- gochecknoglobals
- funlen
- godox
- gocognit
- whitespace
- wsl

File diff suppressed because one or more lines are too long
@ -14,11 +14,41 @@
|
|||
|
||||
package spec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// ContactInfo contact information for the exposed API.
|
||||
//
|
||||
// For more information: http://goo.gl/8us55a#contactObject
|
||||
type ContactInfo struct {
|
||||
ContactInfoProps
|
||||
VendorExtensible
|
||||
}
|
||||
|
||||
type ContactInfoProps struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
Email string `json:"email,omitempty"`
|
||||
}
|
||||
|
||||
func (c *ContactInfo) UnmarshalJSON(data []byte) error {
|
||||
if err := json.Unmarshal(data, &c.ContactInfoProps); err != nil {
|
||||
return err
|
||||
}
|
||||
return json.Unmarshal(data, &c.VendorExtensible)
|
||||
}
|
||||
|
||||
func (c ContactInfo) MarshalJSON() ([]byte, error) {
|
||||
b1, err := json.Marshal(c.ContactInfoProps)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b2, err := json.Marshal(c.VendorExtensible)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return swag.ConcatJSON(b1, b2), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -452,11 +452,12 @@ func expandPathItem(pathItem *PathItem, resolver *schemaLoader, basePath string)
|
|||
return err
|
||||
}
|
||||
if pathItem.Ref.String() != "" {
|
||||
var err error
|
||||
resolver, err = resolver.transitiveResolver(basePath, pathItem.Ref)
|
||||
if resolver.shouldStopOnError(err) {
|
||||
transitiveResolver, err := resolver.transitiveResolver(basePath, pathItem.Ref)
|
||||
if transitiveResolver.shouldStopOnError(err) {
|
||||
return err
|
||||
}
|
||||
basePath = transitiveResolver.updateBasePath(resolver, basePath)
|
||||
resolver = transitiveResolver
|
||||
}
|
||||
pathItem.Ref = Ref{}
|
||||
|
||||
|
|
|
|||
|
|
@ -4,14 +4,9 @@ require (
|
|||
github.com/go-openapi/jsonpointer v0.19.3
|
||||
github.com/go-openapi/jsonreference v0.19.2
|
||||
github.com/go-openapi/swag v0.19.5
|
||||
github.com/kr/pty v1.1.5 // indirect
|
||||
github.com/stretchr/objx v0.2.0 // indirect
|
||||
github.com/stretchr/testify v1.3.0
|
||||
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8 // indirect
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 // indirect
|
||||
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f // indirect
|
||||
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59 // indirect
|
||||
gopkg.in/yaml.v2 v2.2.2
|
||||
gopkg.in/yaml.v2 v2.2.4
|
||||
)
|
||||
|
||||
go 1.13
|
||||
|
|
|
|||
|
|
@ -1,5 +1,3 @@
|
|||
github.com/PuerkitoBio/purell v1.1.0 h1:rmGxhojJlM0tuKtfdvliR84CFHljx9ag64t2xmVkjK4=
|
||||
github.com/PuerkitoBio/purell v1.1.0/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
|
||||
github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tNFfI=
|
||||
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
|
||||
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
|
||||
|
|
@ -7,20 +5,12 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdko
|
|||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/go-openapi/jsonpointer v0.17.0 h1:nH6xp8XdXHx8dqveo0ZuJBluCO2qGrPbDNZ0dwoRHP0=
|
||||
github.com/go-openapi/jsonpointer v0.17.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
|
||||
github.com/go-openapi/jsonpointer v0.19.0 h1:FTUMcX77w5rQkClIzDtTxvn6Bsa894CcrzNj2MMfeg8=
|
||||
github.com/go-openapi/jsonpointer v0.19.0/go.mod h1:cOnomiV+CVVwFLk0A/MExoFMjwdsUdVpsRhURCKh+3M=
|
||||
github.com/go-openapi/jsonpointer v0.19.2 h1:A9+F4Dc/MCNB5jibxf6rRvOvR/iFgQdyNx9eIhnGqq0=
|
||||
github.com/go-openapi/jsonpointer v0.19.2/go.mod h1:3akKfEdA7DF1sugOqz1dVQHBcuDBPKZGEoHC/NkiQRg=
|
||||
github.com/go-openapi/jsonpointer v0.19.3 h1:gihV7YNZK1iK6Tgwwsxo2rJbD1GTbdm72325Bq8FI3w=
|
||||
github.com/go-openapi/jsonpointer v0.19.3/go.mod h1:Pl9vOtqEWErmShwVjC8pYs9cog34VGT37dQOVbmoatg=
|
||||
github.com/go-openapi/jsonreference v0.19.0 h1:BqWKpV1dFd+AuiKlgtddwVIFQsuMpxfBDBHGfM2yNpk=
|
||||
github.com/go-openapi/jsonreference v0.19.0/go.mod h1:g4xxGn04lDIRh0GJb5QlpE3HfopLOL6uZrK/VgnsK9I=
|
||||
github.com/go-openapi/jsonreference v0.19.2 h1:o20suLFB4Ri0tuzpWtyHlh7E7HnkqTNLq6aR6WVNS1w=
|
||||
github.com/go-openapi/jsonreference v0.19.2/go.mod h1:jMjeRr2HHw6nAVajTXJ4eiUwohSTlpa0o73RUL1owJc=
|
||||
github.com/go-openapi/swag v0.17.0 h1:iqrgMg7Q7SvtbWLlltPrkMs0UBJI6oTSs79JFRUi880=
|
||||
github.com/go-openapi/swag v0.17.0/go.mod h1:AByQ+nYG6gQg71GINrmuDXCPWdL640yX49/kXLo40Tg=
|
||||
github.com/go-openapi/swag v0.19.2 h1:jvO6bCMBEilGwMfHhrd61zIID4oIFdwb76V17SM88dE=
|
||||
github.com/go-openapi/swag v0.19.2/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh66Z9tfKk=
|
||||
github.com/go-openapi/swag v0.19.5 h1:lTz6Ys4CmqqCQmZPBlbQENR1/GucA2bzYTE12Pw4tFY=
|
||||
|
|
@ -28,11 +18,8 @@ github.com/go-openapi/swag v0.19.5/go.mod h1:POnQmlKehdgb5mhVOsnJFsivZCEZ/vjK9gh
|
|||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.5/go.mod h1:9r2w37qlBe7rQ6e1fg1S/9xpWHSnaqNdHD3WcMdbPDA=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329 h1:2gxZ0XQIU/5z3Z3bUBu+FXuk2pFbkN6tcwi/pjyaDic=
|
||||
github.com/mailru/easyjson v0.0.0-20180823135443-60711f1a8329/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63 h1:nTT4s92Dgz2HlrB2NaMgvlfqHH39OgMhA7z3PK7PGD4=
|
||||
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63/go.mod h1:C1wdFJiN94OJF2b5HbByQZoLdCWB1Yqtg26g4irojpc=
|
||||
github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e h1:hB2xlXdHp/pmPZq0y3QnmWAArdw9PqbmotexnWx/FU8=
|
||||
|
|
@ -40,35 +27,23 @@ github.com/mailru/easyjson v0.0.0-20190626092158-b2ccc519800e/go.mod h1:C1wdFJiN
|
|||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||
github.com/stretchr/testify v1.2.2 h1:bSDNvY7ZPG5RlJ8otE/7V6gMiyenm9RtJ7IUVIAoJ1w=
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0Q=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/crypto v0.0.0-20190611184440-5c40567a22f8/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/net v0.0.0-20181005035420-146acd28ed58 h1:otZG8yDCO4LVps5+9bxOeNiCvgmOyt96J3roHTYs7oE=
|
||||
golang.org/x/net v0.0.0-20181005035420-146acd28ed58/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||
golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
|
||||
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980 h1:dfGZHvZk057jK2MCeWus/TowKpJ8y4AmooUzdBSR9GU=
|
||||
golang.org/x/net v0.0.0-20190613194153-d28f0bde5980/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297 h1:k7pJ2yAPLPgbskkFdhRCsA77k2fySZ1zf2zCjvQCiIM=
|
||||
golang.org/x/net v0.0.0-20190827160401-ba9fcec4b297/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190616124812-15dcb6c0061f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
golang.org/x/tools v0.0.0-20190614205625-5aca471b1d59/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.1 h1:mUhvW9EsL+naU5Q3cakzfE91YhliOondGd6ZrsDBHQE=
|
||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
|
|
|||
|
|
@ -14,10 +14,40 @@
|
|||
|
||||
package spec
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
|
||||
"github.com/go-openapi/swag"
|
||||
)
|
||||
|
||||
// License information for the exposed API.
|
||||
//
|
||||
// For more information: http://goo.gl/8us55a#licenseObject
|
||||
type License struct {
|
||||
LicenseProps
|
||||
VendorExtensible
|
||||
}
|
||||
|
||||
type LicenseProps struct {
|
||||
Name string `json:"name,omitempty"`
|
||||
URL string `json:"url,omitempty"`
|
||||
}
|
||||
|
||||
func (l *License) UnmarshalJSON(data []byte) error {
|
||||
if err := json.Unmarshal(data, &l.LicenseProps); err != nil {
|
||||
return err
|
||||
}
|
||||
return json.Unmarshal(data, &l.VendorExtensible)
|
||||
}
|
||||
|
||||
func (l License) MarshalJSON() ([]byte, error) {
|
||||
b1, err := json.Marshal(l.LicenseProps)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
b2, err := json.Marshal(l.VendorExtensible)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return swag.ConcatJSON(b1, b2), nil
|
||||
}
|
||||
|
|
|
|||
|
|
@ -68,10 +68,12 @@ func (r *Ref) IsValidURI(basepaths ...string) bool {
|
|||
}
|
||||
|
||||
if r.HasFullURL {
|
||||
//#nosec
|
||||
rr, err := http.Get(v)
|
||||
if err != nil {
|
||||
return false
|
||||
}
|
||||
defer rr.Body.Close()
|
||||
|
||||
return rr.StatusCode/100 == 2
|
||||
}
|
||||
|
|
|
|||
|
|
@ -86,12 +86,7 @@ func (r *schemaLoader) transitiveResolver(basePath string, ref Ref) (*schemaLoad
|
|||
newOptions := r.options
|
||||
newOptions.RelativeBase = rootURL.String()
|
||||
debugLog("setting new root: %s", newOptions.RelativeBase)
|
||||
resolver, err := defaultSchemaLoader(root, newOptions, r.cache, r.context)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return resolver, nil
|
||||
return defaultSchemaLoader(root, newOptions, r.cache, r.context)
|
||||
}
|
||||
|
||||
func (r *schemaLoader) updateBasePath(transitive *schemaLoader, basePath string) string {
|
||||
|
|
|
|||
|
|
@ -88,7 +88,7 @@ func ConvertFloat64(str string) (float64, error) {
|
|||
return strconv.ParseFloat(str, 64)
|
||||
}
|
||||
|
||||
// ConvertInt8 turn a string into int8 boolean
|
||||
// ConvertInt8 turn a string into an int8
|
||||
func ConvertInt8(str string) (int8, error) {
|
||||
i, err := strconv.ParseInt(str, 10, 8)
|
||||
if err != nil {
|
||||
|
|
@ -97,7 +97,7 @@ func ConvertInt8(str string) (int8, error) {
|
|||
return int8(i), nil
|
||||
}
|
||||
|
||||
// ConvertInt16 turn a string into a int16
|
||||
// ConvertInt16 turn a string into an int16
|
||||
func ConvertInt16(str string) (int16, error) {
|
||||
i, err := strconv.ParseInt(str, 10, 16)
|
||||
if err != nil {
|
||||
|
|
@ -106,7 +106,7 @@ func ConvertInt16(str string) (int16, error) {
|
|||
return int16(i), nil
|
||||
}
|
||||
|
||||
// ConvertInt32 turn a string into a int32
|
||||
// ConvertInt32 turn a string into an int32
|
||||
func ConvertInt32(str string) (int32, error) {
|
||||
i, err := strconv.ParseInt(str, 10, 32)
|
||||
if err != nil {
|
||||
|
|
@ -115,12 +115,12 @@ func ConvertInt32(str string) (int32, error) {
|
|||
return int32(i), nil
|
||||
}
|
||||
|
||||
// ConvertInt64 turn a string into a int64
|
||||
// ConvertInt64 turn a string into an int64
|
||||
func ConvertInt64(str string) (int64, error) {
|
||||
return strconv.ParseInt(str, 10, 64)
|
||||
}
|
||||
|
||||
// ConvertUint8 turn a string into a uint8
|
||||
// ConvertUint8 turn a string into an uint8
|
||||
func ConvertUint8(str string) (uint8, error) {
|
||||
i, err := strconv.ParseUint(str, 10, 8)
|
||||
if err != nil {
|
||||
|
|
@ -129,7 +129,7 @@ func ConvertUint8(str string) (uint8, error) {
|
|||
return uint8(i), nil
|
||||
}
|
||||
|
||||
// ConvertUint16 turn a string into a uint16
|
||||
// ConvertUint16 turn a string into an uint16
|
||||
func ConvertUint16(str string) (uint16, error) {
|
||||
i, err := strconv.ParseUint(str, 10, 16)
|
||||
if err != nil {
|
||||
|
|
@ -138,7 +138,7 @@ func ConvertUint16(str string) (uint16, error) {
|
|||
return uint16(i), nil
|
||||
}
|
||||
|
||||
// ConvertUint32 turn a string into a uint32
|
||||
// ConvertUint32 turn a string into an uint32
|
||||
func ConvertUint32(str string) (uint32, error) {
|
||||
i, err := strconv.ParseUint(str, 10, 32)
|
||||
if err != nil {
|
||||
|
|
@ -147,7 +147,7 @@ func ConvertUint32(str string) (uint32, error) {
|
|||
return uint32(i), nil
|
||||
}
|
||||
|
||||
// ConvertUint64 turn a string into a uint64
|
||||
// ConvertUint64 turn a string into an uint64
|
||||
func ConvertUint64(str string) (uint64, error) {
|
||||
return strconv.ParseUint(str, 10, 64)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -181,12 +181,12 @@ func IntValueMap(src map[string]*int) map[string]int {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Int32 returns a pointer to of the int64 value passed in.
|
||||
// Int32 returns a pointer to of the int32 value passed in.
|
||||
func Int32(v int32) *int32 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Int32Value returns the value of the int64 pointer passed in or
|
||||
// Int32Value returns the value of the int32 pointer passed in or
|
||||
// 0 if the pointer is nil.
|
||||
func Int32Value(v *int32) int32 {
|
||||
if v != nil {
|
||||
|
|
@ -195,7 +195,7 @@ func Int32Value(v *int32) int32 {
|
|||
return 0
|
||||
}
|
||||
|
||||
// Int32Slice converts a slice of int64 values into a slice of
|
||||
// Int32Slice converts a slice of int32 values into a slice of
|
||||
// int32 pointers
|
||||
func Int32Slice(src []int32) []*int32 {
|
||||
dst := make([]*int32, len(src))
|
||||
|
|
@ -299,13 +299,13 @@ func Int64ValueMap(src map[string]*int64) map[string]int64 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint returns a pouinter to of the uint value passed in.
|
||||
// Uint returns a pointer to of the uint value passed in.
|
||||
func Uint(v uint) *uint {
|
||||
return &v
|
||||
}
|
||||
|
||||
// UintValue returns the value of the uint pouinter passed in or
|
||||
// 0 if the pouinter is nil.
|
||||
// UintValue returns the value of the uint pointer passed in or
|
||||
// 0 if the pointer is nil.
|
||||
func UintValue(v *uint) uint {
|
||||
if v != nil {
|
||||
return *v
|
||||
|
|
@ -313,8 +313,8 @@ func UintValue(v *uint) uint {
|
|||
return 0
|
||||
}
|
||||
|
||||
// UintSlice converts a slice of uint values uinto a slice of
|
||||
// uint pouinters
|
||||
// UintSlice converts a slice of uint values into a slice of
|
||||
// uint pointers
|
||||
func UintSlice(src []uint) []*uint {
|
||||
dst := make([]*uint, len(src))
|
||||
for i := 0; i < len(src); i++ {
|
||||
|
|
@ -323,7 +323,7 @@ func UintSlice(src []uint) []*uint {
|
|||
return dst
|
||||
}
|
||||
|
||||
// UintValueSlice converts a slice of uint pouinters uinto a slice of
|
||||
// UintValueSlice converts a slice of uint pointers into a slice of
|
||||
// uint values
|
||||
func UintValueSlice(src []*uint) []uint {
|
||||
dst := make([]uint, len(src))
|
||||
|
|
@ -335,8 +335,8 @@ func UintValueSlice(src []*uint) []uint {
|
|||
return dst
|
||||
}
|
||||
|
||||
// UintMap converts a string map of uint values uinto a string
|
||||
// map of uint pouinters
|
||||
// UintMap converts a string map of uint values into a string
|
||||
// map of uint pointers
|
||||
func UintMap(src map[string]uint) map[string]*uint {
|
||||
dst := make(map[string]*uint)
|
||||
for k, val := range src {
|
||||
|
|
@ -346,7 +346,7 @@ func UintMap(src map[string]uint) map[string]*uint {
|
|||
return dst
|
||||
}
|
||||
|
||||
// UintValueMap converts a string map of uint pouinters uinto a string
|
||||
// UintValueMap converts a string map of uint pointers into a string
|
||||
// map of uint values
|
||||
func UintValueMap(src map[string]*uint) map[string]uint {
|
||||
dst := make(map[string]uint)
|
||||
|
|
@ -358,13 +358,13 @@ func UintValueMap(src map[string]*uint) map[string]uint {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint32 returns a pouinter to of the uint64 value passed in.
|
||||
// Uint32 returns a pointer to of the uint32 value passed in.
|
||||
func Uint32(v uint32) *uint32 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Uint32Value returns the value of the uint64 pouinter passed in or
|
||||
// 0 if the pouinter is nil.
|
||||
// Uint32Value returns the value of the uint32 pointer passed in or
|
||||
// 0 if the pointer is nil.
|
||||
func Uint32Value(v *uint32) uint32 {
|
||||
if v != nil {
|
||||
return *v
|
||||
|
|
@ -372,8 +372,8 @@ func Uint32Value(v *uint32) uint32 {
|
|||
return 0
|
||||
}
|
||||
|
||||
// Uint32Slice converts a slice of uint64 values uinto a slice of
|
||||
// uint32 pouinters
|
||||
// Uint32Slice converts a slice of uint32 values into a slice of
|
||||
// uint32 pointers
|
||||
func Uint32Slice(src []uint32) []*uint32 {
|
||||
dst := make([]*uint32, len(src))
|
||||
for i := 0; i < len(src); i++ {
|
||||
|
|
@ -382,7 +382,7 @@ func Uint32Slice(src []uint32) []*uint32 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint32ValueSlice converts a slice of uint32 pouinters uinto a slice of
|
||||
// Uint32ValueSlice converts a slice of uint32 pointers into a slice of
|
||||
// uint32 values
|
||||
func Uint32ValueSlice(src []*uint32) []uint32 {
|
||||
dst := make([]uint32, len(src))
|
||||
|
|
@ -394,8 +394,8 @@ func Uint32ValueSlice(src []*uint32) []uint32 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint32Map converts a string map of uint32 values uinto a string
|
||||
// map of uint32 pouinters
|
||||
// Uint32Map converts a string map of uint32 values into a string
|
||||
// map of uint32 pointers
|
||||
func Uint32Map(src map[string]uint32) map[string]*uint32 {
|
||||
dst := make(map[string]*uint32)
|
||||
for k, val := range src {
|
||||
|
|
@ -405,7 +405,7 @@ func Uint32Map(src map[string]uint32) map[string]*uint32 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint32ValueMap converts a string map of uint32 pouinters uinto a string
|
||||
// Uint32ValueMap converts a string map of uint32 pointers into a string
|
||||
// map of uint32 values
|
||||
func Uint32ValueMap(src map[string]*uint32) map[string]uint32 {
|
||||
dst := make(map[string]uint32)
|
||||
|
|
@ -417,13 +417,13 @@ func Uint32ValueMap(src map[string]*uint32) map[string]uint32 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint64 returns a pouinter to of the uint64 value passed in.
|
||||
// Uint64 returns a pointer to of the uint64 value passed in.
|
||||
func Uint64(v uint64) *uint64 {
|
||||
return &v
|
||||
}
|
||||
|
||||
// Uint64Value returns the value of the uint64 pouinter passed in or
|
||||
// 0 if the pouinter is nil.
|
||||
// Uint64Value returns the value of the uint64 pointer passed in or
|
||||
// 0 if the pointer is nil.
|
||||
func Uint64Value(v *uint64) uint64 {
|
||||
if v != nil {
|
||||
return *v
|
||||
|
|
@ -431,8 +431,8 @@ func Uint64Value(v *uint64) uint64 {
|
|||
return 0
|
||||
}
|
||||
|
||||
// Uint64Slice converts a slice of uint64 values uinto a slice of
|
||||
// uint64 pouinters
|
||||
// Uint64Slice converts a slice of uint64 values into a slice of
|
||||
// uint64 pointers
|
||||
func Uint64Slice(src []uint64) []*uint64 {
|
||||
dst := make([]*uint64, len(src))
|
||||
for i := 0; i < len(src); i++ {
|
||||
|
|
@ -441,7 +441,7 @@ func Uint64Slice(src []uint64) []*uint64 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint64ValueSlice converts a slice of uint64 pouinters uinto a slice of
|
||||
// Uint64ValueSlice converts a slice of uint64 pointers into a slice of
|
||||
// uint64 values
|
||||
func Uint64ValueSlice(src []*uint64) []uint64 {
|
||||
dst := make([]uint64, len(src))
|
||||
|
|
@ -453,8 +453,8 @@ func Uint64ValueSlice(src []*uint64) []uint64 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint64Map converts a string map of uint64 values uinto a string
|
||||
// map of uint64 pouinters
|
||||
// Uint64Map converts a string map of uint64 values into a string
|
||||
// map of uint64 pointers
|
||||
func Uint64Map(src map[string]uint64) map[string]*uint64 {
|
||||
dst := make(map[string]*uint64)
|
||||
for k, val := range src {
|
||||
|
|
@ -464,7 +464,7 @@ func Uint64Map(src map[string]uint64) map[string]*uint64 {
|
|||
return dst
|
||||
}
|
||||
|
||||
// Uint64ValueMap converts a string map of uint64 pouinters uinto a string
|
||||
// Uint64ValueMap converts a string map of uint64 pointers into a string
|
||||
// map of uint64 values
|
||||
func Uint64ValueMap(src map[string]*uint64) map[string]uint64 {
|
||||
dst := make(map[string]uint64)
|
||||
|
|
|
|||
|
|
@ -6,9 +6,11 @@ require (
|
|||
github.com/mailru/easyjson v0.0.0-20190614124828-94de47d64c63
|
||||
github.com/stretchr/testify v1.3.0
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 // indirect
|
||||
gopkg.in/yaml.v2 v2.2.2
|
||||
gopkg.in/yaml.v2 v2.2.4
|
||||
)
|
||||
|
||||
replace github.com/golang/lint => golang.org/x/lint v0.0.0-20190409202823-959b441ac422
|
||||
|
||||
replace sourcegraph.com/sourcegraph/go-diff => github.com/sourcegraph/go-diff v0.5.1
|
||||
|
||||
go 1.13
|
||||
|
|
|
|||
|
|
@ -16,5 +16,5 @@ github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UV
|
|||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
|
|
|||
|
|
@ -51,7 +51,7 @@ type ejUnmarshaler interface {
|
|||
UnmarshalEasyJSON(w *jlexer.Lexer)
|
||||
}
|
||||
|
||||
// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaller
|
||||
// WriteJSON writes json data, prefers finding an appropriate interface to short-circuit the marshaler
|
||||
// so it takes the fastest option available.
|
||||
func WriteJSON(data interface{}) ([]byte, error) {
|
||||
if d, ok := data.(ejMarshaler); ok {
|
||||
|
|
@ -65,8 +65,8 @@ func WriteJSON(data interface{}) ([]byte, error) {
|
|||
return json.Marshal(data)
|
||||
}
|
||||
|
||||
// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaller
|
||||
// so it takes the fastes option available
|
||||
// ReadJSON reads json data, prefers finding an appropriate interface to short-circuit the unmarshaler
|
||||
// so it takes the fastest option available
|
||||
func ReadJSON(data []byte, value interface{}) error {
|
||||
trimmedData := bytes.Trim(data, "\x00")
|
||||
if d, ok := value.(ejUnmarshaler); ok {
|
||||
|
|
@ -189,7 +189,7 @@ func FromDynamicJSON(data, target interface{}) error {
|
|||
return json.Unmarshal(b, target)
|
||||
}
|
||||
|
||||
// NameProvider represents an object capabale of translating from go property names
|
||||
// NameProvider represents an object capable of translating from go property names
|
||||
// to json property names
|
||||
// This type is thread-safe.
|
||||
type NameProvider struct {
|
||||
|
|
|
|||
|
|
@ -189,6 +189,8 @@ type Marshaler interface {
|
|||
// prefixed by a varint-encoded length.
|
||||
func (p *Buffer) EncodeMessage(pb Message) error {
|
||||
siz := Size(pb)
|
||||
sizVar := SizeVarint(uint64(siz))
|
||||
p.grow(siz + sizVar)
|
||||
p.EncodeVarint(uint64(siz))
|
||||
return p.Marshal(pb)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -948,13 +948,19 @@ func isProto3Zero(v reflect.Value) bool {
|
|||
return false
|
||||
}
|
||||
|
||||
// ProtoPackageIsVersion2 is referenced from generated protocol buffer files
|
||||
// to assert that that code is compatible with this version of the proto package.
|
||||
const GoGoProtoPackageIsVersion2 = true
|
||||
const (
|
||||
// ProtoPackageIsVersion3 is referenced from generated protocol buffer files
|
||||
// to assert that that code is compatible with this version of the proto package.
|
||||
GoGoProtoPackageIsVersion3 = true
|
||||
|
||||
// ProtoPackageIsVersion1 is referenced from generated protocol buffer files
|
||||
// to assert that that code is compatible with this version of the proto package.
|
||||
const GoGoProtoPackageIsVersion1 = true
|
||||
// ProtoPackageIsVersion2 is referenced from generated protocol buffer files
|
||||
// to assert that that code is compatible with this version of the proto package.
|
||||
GoGoProtoPackageIsVersion2 = true
|
||||
|
||||
// ProtoPackageIsVersion1 is referenced from generated protocol buffer files
|
||||
// to assert that that code is compatible with this version of the proto package.
|
||||
GoGoProtoPackageIsVersion1 = true
|
||||
)
|
||||
|
||||
// InternalMessageInfo is a type used internally by generated .pb.go files.
|
||||
// This type is not intended to be used by non-generated code.
|
||||
|
|
|
|||
|
|
@ -43,7 +43,6 @@ package proto
|
|||
import (
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
|
|
@ -205,7 +204,7 @@ func (p *Properties) Parse(s string) {
|
|||
// "bytes,49,opt,name=foo,def=hello!"
|
||||
fields := strings.Split(s, ",") // breaks def=, but handled below.
|
||||
if len(fields) < 2 {
|
||||
fmt.Fprintf(os.Stderr, "proto: tag has too few fields: %q\n", s)
|
||||
log.Printf("proto: tag has too few fields: %q", s)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -225,7 +224,7 @@ func (p *Properties) Parse(s string) {
|
|||
p.WireType = WireBytes
|
||||
// no numeric converter for non-numeric types
|
||||
default:
|
||||
fmt.Fprintf(os.Stderr, "proto: tag has unknown wire type: %q\n", s)
|
||||
log.Printf("proto: tag has unknown wire type: %q", s)
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -400,6 +399,15 @@ func GetProperties(t reflect.Type) *StructProperties {
|
|||
return sprop
|
||||
}
|
||||
|
||||
type (
|
||||
oneofFuncsIface interface {
|
||||
XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
|
||||
}
|
||||
oneofWrappersIface interface {
|
||||
XXX_OneofWrappers() []interface{}
|
||||
}
|
||||
)
|
||||
|
||||
// getPropertiesLocked requires that propertiesMu is held.
|
||||
func getPropertiesLocked(t reflect.Type) *StructProperties {
|
||||
if prop, ok := propertiesMap[t]; ok {
|
||||
|
|
@ -441,37 +449,40 @@ func getPropertiesLocked(t reflect.Type) *StructProperties {
|
|||
// Re-order prop.order.
|
||||
sort.Sort(prop)
|
||||
|
||||
type oneofMessage interface {
|
||||
XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
|
||||
}
|
||||
if om, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); isOneofMessage && ok {
|
||||
if isOneofMessage {
|
||||
var oots []interface{}
|
||||
_, _, _, oots = om.XXX_OneofFuncs()
|
||||
|
||||
// Interpret oneof metadata.
|
||||
prop.OneofTypes = make(map[string]*OneofProperties)
|
||||
for _, oot := range oots {
|
||||
oop := &OneofProperties{
|
||||
Type: reflect.ValueOf(oot).Type(), // *T
|
||||
Prop: new(Properties),
|
||||
}
|
||||
sft := oop.Type.Elem().Field(0)
|
||||
oop.Prop.Name = sft.Name
|
||||
oop.Prop.Parse(sft.Tag.Get("protobuf"))
|
||||
// There will be exactly one interface field that
|
||||
// this new value is assignable to.
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
if f.Type.Kind() != reflect.Interface {
|
||||
continue
|
||||
switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
|
||||
case oneofFuncsIface:
|
||||
_, _, _, oots = m.XXX_OneofFuncs()
|
||||
case oneofWrappersIface:
|
||||
oots = m.XXX_OneofWrappers()
|
||||
}
|
||||
if len(oots) > 0 {
|
||||
// Interpret oneof metadata.
|
||||
prop.OneofTypes = make(map[string]*OneofProperties)
|
||||
for _, oot := range oots {
|
||||
oop := &OneofProperties{
|
||||
Type: reflect.ValueOf(oot).Type(), // *T
|
||||
Prop: new(Properties),
|
||||
}
|
||||
if !oop.Type.AssignableTo(f.Type) {
|
||||
continue
|
||||
sft := oop.Type.Elem().Field(0)
|
||||
oop.Prop.Name = sft.Name
|
||||
oop.Prop.Parse(sft.Tag.Get("protobuf"))
|
||||
// There will be exactly one interface field that
|
||||
// this new value is assignable to.
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
f := t.Field(i)
|
||||
if f.Type.Kind() != reflect.Interface {
|
||||
continue
|
||||
}
|
||||
if !oop.Type.AssignableTo(f.Type) {
|
||||
continue
|
||||
}
|
||||
oop.Field = i
|
||||
break
|
||||
}
|
||||
oop.Field = i
|
||||
break
|
||||
prop.OneofTypes[oop.Prop.OrigName] = oop
|
||||
}
|
||||
prop.OneofTypes[oop.Prop.OrigName] = oop
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@ -389,8 +389,13 @@ func (u *marshalInfo) computeMarshalInfo() {
|
|||
// get oneof implementers
|
||||
var oneofImplementers []interface{}
|
||||
// gogo: isOneofMessage is needed for embedded oneof messages, without a marshaler and unmarshaler
|
||||
if m, ok := reflect.Zero(reflect.PtrTo(t)).Interface().(oneofMessage); ok && isOneofMessage {
|
||||
_, _, _, oneofImplementers = m.XXX_OneofFuncs()
|
||||
if isOneofMessage {
|
||||
switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
|
||||
case oneofFuncsIface:
|
||||
_, _, _, oneofImplementers = m.XXX_OneofFuncs()
|
||||
case oneofWrappersIface:
|
||||
oneofImplementers = m.XXX_OneofWrappers()
|
||||
}
|
||||
}
|
||||
|
||||
// normal fields
|
||||
|
|
@ -519,10 +524,6 @@ func (fi *marshalFieldInfo) computeOneofFieldInfo(f *reflect.StructField, oneofI
|
|||
}
|
||||
}
|
||||
|
||||
type oneofMessage interface {
|
||||
XXX_OneofFuncs() (func(Message, *Buffer) error, func(Message, int, int, *Buffer) (bool, error), func(Message) int, []interface{})
|
||||
}
|
||||
|
||||
// wiretype returns the wire encoding of the type.
|
||||
func wiretype(encoding string) uint64 {
|
||||
switch encoding {
|
||||
|
|
@ -2968,7 +2969,9 @@ func (p *Buffer) Marshal(pb Message) error {
|
|||
if m, ok := pb.(newMarshaler); ok {
|
||||
siz := m.XXX_Size()
|
||||
p.grow(siz) // make sure buf has enough capacity
|
||||
p.buf, err = m.XXX_Marshal(p.buf, p.deterministic)
|
||||
pp := p.buf[len(p.buf) : len(p.buf) : len(p.buf)+siz]
|
||||
pp, err = m.XXX_Marshal(pp, p.deterministic)
|
||||
p.buf = append(p.buf, pp...)
|
||||
return err
|
||||
}
|
||||
if m, ok := pb.(Marshaler); ok {
|
||||
|
|
|
|||
|
|
@ -530,6 +530,25 @@ func (mi *mergeInfo) computeMergeInfo() {
|
|||
}
|
||||
case reflect.Struct:
|
||||
switch {
|
||||
case isSlice && !isPointer: // E.g. []pb.T
|
||||
mergeInfo := getMergeInfo(tf)
|
||||
zero := reflect.Zero(tf)
|
||||
mfi.merge = func(dst, src pointer) {
|
||||
// TODO: Make this faster?
|
||||
dstsp := dst.asPointerTo(f.Type)
|
||||
dsts := dstsp.Elem()
|
||||
srcs := src.asPointerTo(f.Type).Elem()
|
||||
for i := 0; i < srcs.Len(); i++ {
|
||||
dsts = reflect.Append(dsts, zero)
|
||||
srcElement := srcs.Index(i).Addr()
|
||||
dstElement := dsts.Index(dsts.Len() - 1).Addr()
|
||||
mergeInfo.merge(valToPointer(dstElement), valToPointer(srcElement))
|
||||
}
|
||||
if dsts.IsNil() {
|
||||
dsts = reflect.MakeSlice(f.Type, 0, 0)
|
||||
}
|
||||
dstsp.Elem().Set(dsts)
|
||||
}
|
||||
case !isPointer:
|
||||
mergeInfo := getMergeInfo(tf)
|
||||
mfi.merge = func(dst, src pointer) {
|
||||
|
|
|
|||
|
|
@ -371,15 +371,18 @@ func (u *unmarshalInfo) computeUnmarshalInfo() {
|
|||
}
|
||||
|
||||
// Find any types associated with oneof fields.
|
||||
// TODO: XXX_OneofFuncs returns more info than we need. Get rid of some of it?
|
||||
fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("XXX_OneofFuncs")
|
||||
// gogo: len(oneofFields) > 0 is needed for embedded oneof messages, without a marshaler and unmarshaler
|
||||
if fn.IsValid() && len(oneofFields) > 0 {
|
||||
res := fn.Call(nil)[3] // last return value from XXX_OneofFuncs: []interface{}
|
||||
for i := res.Len() - 1; i >= 0; i-- {
|
||||
v := res.Index(i) // interface{}
|
||||
tptr := reflect.ValueOf(v.Interface()).Type() // *Msg_X
|
||||
typ := tptr.Elem() // Msg_X
|
||||
if len(oneofFields) > 0 {
|
||||
var oneofImplementers []interface{}
|
||||
switch m := reflect.Zero(reflect.PtrTo(t)).Interface().(type) {
|
||||
case oneofFuncsIface:
|
||||
_, _, _, oneofImplementers = m.XXX_OneofFuncs()
|
||||
case oneofWrappersIface:
|
||||
oneofImplementers = m.XXX_OneofWrappers()
|
||||
}
|
||||
for _, v := range oneofImplementers {
|
||||
tptr := reflect.TypeOf(v) // *Msg_X
|
||||
typ := tptr.Elem() // Msg_X
|
||||
|
||||
f := typ.Field(0) // oneof implementers have one field
|
||||
baseUnmarshal := fieldUnmarshaler(&f)
|
||||
|
|
@ -407,11 +410,12 @@ func (u *unmarshalInfo) computeUnmarshalInfo() {
|
|||
u.setTag(fieldNum, of.field, unmarshal, 0, name)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
// Get extension ranges, if any.
|
||||
fn = reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray")
|
||||
fn := reflect.Zero(reflect.PtrTo(t)).MethodByName("ExtensionRangeArray")
|
||||
if fn.IsValid() {
|
||||
if !u.extensions.IsValid() && !u.oldExtensions.IsValid() && !u.bytesExtensions.IsValid() {
|
||||
panic("a message with extensions, but no extensions field in " + t.Name())
|
||||
|
|
|
|||
|
|
@ -476,6 +476,8 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||
|
||||
// writeAny writes an arbitrary field.
|
||||
func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||
v = reflect.Indirect(v)
|
||||
|
|
@ -589,8 +591,8 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Propert
|
|||
// mutating this value.
|
||||
v = v.Addr()
|
||||
}
|
||||
if etm, ok := v.Interface().(encoding.TextMarshaler); ok {
|
||||
text, err := etm.MarshalText()
|
||||
if v.Type().Implements(textMarshalerType) {
|
||||
text, err := v.Interface().(encoding.TextMarshaler).MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -393,7 +393,7 @@ func (p *Buffer) Bytes() []byte { return p.buf }
|
|||
// than relying on this API.
|
||||
//
|
||||
// If deterministic serialization is requested, map entries will be sorted
|
||||
// by keys in lexographical order. This is an implementation detail and
|
||||
// by keys in lexicographical order. This is an implementation detail and
|
||||
// subject to change.
|
||||
func (p *Buffer) SetDeterministic(deterministic bool) {
|
||||
p.deterministic = deterministic
|
||||
|
|
|
|||
|
|
@ -456,6 +456,8 @@ func (tm *TextMarshaler) writeStruct(w *textWriter, sv reflect.Value) error {
|
|||
return nil
|
||||
}
|
||||
|
||||
var textMarshalerType = reflect.TypeOf((*encoding.TextMarshaler)(nil)).Elem()
|
||||
|
||||
// writeAny writes an arbitrary field.
|
||||
func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Properties) error {
|
||||
v = reflect.Indirect(v)
|
||||
|
|
@ -519,8 +521,8 @@ func (tm *TextMarshaler) writeAny(w *textWriter, v reflect.Value, props *Propert
|
|||
// mutating this value.
|
||||
v = v.Addr()
|
||||
}
|
||||
if etm, ok := v.Interface().(encoding.TextMarshaler); ok {
|
||||
text, err := etm.MarshalText()
|
||||
if v.Type().Implements(textMarshalerType) {
|
||||
text, err := v.Interface().(encoding.TextMarshaler).MarshalText()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
|
|
|||
|
|
@ -102,7 +102,8 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
|||
//
|
||||
type Any struct {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. The last segment of the URL's path must represent
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
|
|
@ -181,7 +182,9 @@ func init() {
|
|||
proto.RegisterType((*Any)(nil), "google.protobuf.Any")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/protobuf/any.proto", fileDescriptor_b53526c13ae22eb4)
|
||||
}
|
||||
|
||||
var fileDescriptor_b53526c13ae22eb4 = []byte{
|
||||
// 185 bytes of a gzipped FileDescriptorProto
|
||||
|
|
|
|||
|
|
@ -121,7 +121,8 @@ option objc_class_prefix = "GPB";
|
|||
//
|
||||
message Any {
|
||||
// A URL/resource name that uniquely identifies the type of the serialized
|
||||
// protocol buffer message. The last segment of the URL's path must represent
|
||||
// protocol buffer message. This string must contain at least
|
||||
// one "/" character. The last segment of the URL's path must represent
|
||||
// the fully qualified name of the type (as in
|
||||
// `path/google.protobuf.Duration`). The name should be in a canonical form
|
||||
// (e.g., leading "." is not accepted).
|
||||
|
|
|
|||
|
|
@ -41,7 +41,7 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
|||
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||
// duration.seconds += 1;
|
||||
// duration.nanos -= 1000000000;
|
||||
// } else if (durations.seconds > 0 && duration.nanos < 0) {
|
||||
// } else if (duration.seconds > 0 && duration.nanos < 0) {
|
||||
// duration.seconds -= 1;
|
||||
// duration.nanos += 1000000000;
|
||||
// }
|
||||
|
|
@ -142,7 +142,9 @@ func init() {
|
|||
proto.RegisterType((*Duration)(nil), "google.protobuf.Duration")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_23597b2ebd7ac6c5) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/protobuf/duration.proto", fileDescriptor_23597b2ebd7ac6c5)
|
||||
}
|
||||
|
||||
var fileDescriptor_23597b2ebd7ac6c5 = []byte{
|
||||
// 190 bytes of a gzipped FileDescriptorProto
|
||||
|
|
|
|||
|
|
@ -61,7 +61,7 @@ option objc_class_prefix = "GPB";
|
|||
// if (duration.seconds < 0 && duration.nanos > 0) {
|
||||
// duration.seconds += 1;
|
||||
// duration.nanos -= 1000000000;
|
||||
// } else if (durations.seconds > 0 && duration.nanos < 0) {
|
||||
// } else if (duration.seconds > 0 && duration.nanos < 0) {
|
||||
// duration.seconds -= 1;
|
||||
// duration.nanos += 1000000000;
|
||||
// }
|
||||
|
|
@ -101,7 +101,6 @@ option objc_class_prefix = "GPB";
|
|||
//
|
||||
//
|
||||
message Duration {
|
||||
|
||||
// Signed seconds of the span of time. Must be from -315,576,000,000
|
||||
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
|
||||
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
|
||||
|
|
|
|||
|
|
@ -20,17 +20,19 @@ var _ = math.Inf
|
|||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
// A Timestamp represents a point in time independent of any time zone
|
||||
// or calendar, represented as seconds and fractions of seconds at
|
||||
// nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||
// Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||
// backwards to year one. It is encoded assuming all minutes are 60
|
||||
// seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||
// table is needed for interpretation. Range is from
|
||||
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||
// By restricting to that range, we ensure that we can convert to
|
||||
// and from RFC 3339 date strings.
|
||||
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||
// A Timestamp represents a point in time independent of any time zone or local
|
||||
// calendar, encoded as a count of seconds and fractions of seconds at
|
||||
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
// January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
// Gregorian calendar backwards to year one.
|
||||
//
|
||||
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
// second table is needed for interpretation, using a [24-hour linear
|
||||
// smear](https://developers.google.com/time/smear).
|
||||
//
|
||||
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
// restricting to that range, we ensure that we can convert to and from [RFC
|
||||
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
|
|
@ -91,12 +93,14 @@ const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
|||
// 01:30 UTC on January 15, 2017.
|
||||
//
|
||||
// In JavaScript, one can convert a Date object to this format using the
|
||||
// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
|
||||
// standard
|
||||
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
|
||||
// method. In Python, a standard `datetime.datetime` object can be converted
|
||||
// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
|
||||
// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
|
||||
// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
|
||||
// to this format using
|
||||
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
|
||||
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
|
||||
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
|
||||
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
|
||||
// ) to obtain a formatter capable of generating timestamps in this format.
|
||||
//
|
||||
//
|
||||
|
|
@ -160,7 +164,9 @@ func init() {
|
|||
proto.RegisterType((*Timestamp)(nil), "google.protobuf.Timestamp")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_292007bbfe81227e) }
|
||||
func init() {
|
||||
proto.RegisterFile("google/protobuf/timestamp.proto", fileDescriptor_292007bbfe81227e)
|
||||
}
|
||||
|
||||
var fileDescriptor_292007bbfe81227e = []byte{
|
||||
// 191 bytes of a gzipped FileDescriptorProto
|
||||
|
|
|
|||
|
|
@ -40,17 +40,19 @@ option java_outer_classname = "TimestampProto";
|
|||
option java_multiple_files = true;
|
||||
option objc_class_prefix = "GPB";
|
||||
|
||||
// A Timestamp represents a point in time independent of any time zone
|
||||
// or calendar, represented as seconds and fractions of seconds at
|
||||
// nanosecond resolution in UTC Epoch time. It is encoded using the
|
||||
// Proleptic Gregorian Calendar which extends the Gregorian calendar
|
||||
// backwards to year one. It is encoded assuming all minutes are 60
|
||||
// seconds long, i.e. leap seconds are "smeared" so that no leap second
|
||||
// table is needed for interpretation. Range is from
|
||||
// 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z.
|
||||
// By restricting to that range, we ensure that we can convert to
|
||||
// and from RFC 3339 date strings.
|
||||
// See [https://www.ietf.org/rfc/rfc3339.txt](https://www.ietf.org/rfc/rfc3339.txt).
|
||||
// A Timestamp represents a point in time independent of any time zone or local
|
||||
// calendar, encoded as a count of seconds and fractions of seconds at
|
||||
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
|
||||
// January 1, 1970, in the proleptic Gregorian calendar which extends the
|
||||
// Gregorian calendar backwards to year one.
|
||||
//
|
||||
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
|
||||
// second table is needed for interpretation, using a [24-hour linear
|
||||
// smear](https://developers.google.com/time/smear).
|
||||
//
|
||||
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
|
||||
// restricting to that range, we ensure that we can convert to and from [RFC
|
||||
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
|
||||
//
|
||||
// # Examples
|
||||
//
|
||||
|
|
@ -111,17 +113,18 @@ option objc_class_prefix = "GPB";
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString]
// standard
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime)
// with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one
// can use the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime--
// to this format using
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
// ) to obtain a formatter capable of generating timestamps in this format.
//
//
message Timestamp {

  // Represents seconds of UTC time since Unix epoch
  // 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
  // 9999-12-31T23:59:59Z inclusive.

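The comment above only shows Python, Java, and JavaScript conversions. As an aside (not part of the vendored diff), the equivalent in Go, the language of this repository, is a one-liner with the standard library; a minimal sketch:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Render a time.Time in the format the Timestamp comment describes:
	// RFC 3339 with nanosecond precision and a "Z" suffix for UTC.
	t := time.Date(2017, 1, 15, 1, 30, 15, 10000000, time.UTC)
	fmt.Println(t.Format(time.RFC3339Nano)) // 2017-01-15T01:30:15.01Z
}
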
@ -8,8 +8,10 @@ package cmpopts
import (
	"math"
	"reflect"
	"time"

	"github.com/google/go-cmp/cmp"
	"golang.org/x/xerrors"
)

func equateAlways(_, _ interface{}) bool { return true }

@ -87,3 +89,68 @@ func areNaNsF64s(x, y float64) bool {
func areNaNsF32s(x, y float32) bool {
	return areNaNsF64s(float64(x), float64(y))
}

// EquateApproxTime returns a Comparer option that determines two non-zero
// time.Time values to be equal if they are within some margin of one another.
// If both times have a monotonic clock reading, then the monotonic time
// difference will be used. The margin must be non-negative.
func EquateApproxTime(margin time.Duration) cmp.Option {
	if margin < 0 {
		panic("margin must be a non-negative number")
	}
	a := timeApproximator{margin}
	return cmp.FilterValues(areNonZeroTimes, cmp.Comparer(a.compare))
}

func areNonZeroTimes(x, y time.Time) bool {
	return !x.IsZero() && !y.IsZero()
}

type timeApproximator struct {
	margin time.Duration
}

func (a timeApproximator) compare(x, y time.Time) bool {
	// Avoid subtracting times to avoid overflow when the
	// difference is larger than the largest representable duration.
	if x.After(y) {
		// Ensure x is always before y
		x, y = y, x
	}
	// We're within the margin if x+margin >= y.
	// Note: time.Time doesn't have an AfterOrEqual method, hence the negation.
	return !x.Add(a.margin).Before(y)
}

// AnyError is an error that matches any non-nil error.
var AnyError anyError

type anyError struct{}

func (anyError) Error() string     { return "any error" }
func (anyError) Is(err error) bool { return err != nil }

// EquateErrors returns a Comparer option that determines errors to be equal
// if errors.Is reports them to match. The AnyError error can be used to
// match any non-nil error.
func EquateErrors() cmp.Option {
	return cmp.FilterValues(areConcreteErrors, cmp.Comparer(compareErrors))
}

// areConcreteErrors reports whether x and y are types that implement error.
// The input types are deliberately of the interface{} type rather than the
// error type so that we can handle situations where the current type is an
// interface{}, but the underlying concrete types both happen to implement
// the error interface.
func areConcreteErrors(x, y interface{}) bool {
	_, ok1 := x.(error)
	_, ok2 := y.(error)
	return ok1 && ok2
}

func compareErrors(x, y interface{}) bool {
	xe := x.(error)
	ye := y.(error)
	// TODO: Use errors.Is when go1.13 is the minimally supported version of Go.
	return xerrors.Is(xe, ye) || xerrors.Is(ye, xe)
}

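For context (not part of the vendored diff), a minimal usage sketch of the two options added above, assuming the go-cmp and xerrors versions vendored by this commit:

package main

import (
	"fmt"
	"io"
	"time"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

func main() {
	// Times that differ by less than the margin compare as equal.
	t0 := time.Now()
	t1 := t0.Add(5 * time.Millisecond)
	fmt.Println(cmp.Equal(t0, t1, cmpopts.EquateApproxTime(10*time.Millisecond))) // true

	// Errors compare by errors.Is semantics; AnyError matches any non-nil error.
	err := fmt.Errorf("read failed: %w", io.EOF)
	fmt.Println(cmp.Equal(err, io.EOF, cmpopts.EquateErrors()))           // true
	fmt.Println(cmp.Equal(err, cmpopts.AnyError, cmpopts.EquateErrors())) // true
}
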
@ -22,8 +22,8 @@
|
|||
// equality is determined by recursively comparing the primitive kinds on both
|
||||
// values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, unexported
|
||||
// fields are not compared by default; they result in panics unless suppressed
|
||||
// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly compared
|
||||
// using the AllowUnexported option.
|
||||
// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly
|
||||
// compared using the Exporter option.
|
||||
package cmp
|
||||
|
||||
import (
|
||||
|
|
@ -62,8 +62,8 @@ import (
|
|||
//
|
||||
// Structs are equal if recursively calling Equal on all fields report equal.
|
||||
// If a struct contains unexported fields, Equal panics unless an Ignore option
|
||||
// (e.g., cmpopts.IgnoreUnexported) ignores that field or the AllowUnexported
|
||||
// option explicitly permits comparing the unexported field.
|
||||
// (e.g., cmpopts.IgnoreUnexported) ignores that field or the Exporter option
|
||||
// explicitly permits comparing the unexported field.
|
||||
//
|
||||
// Slices are equal if they are both nil or both non-nil, where recursively
|
||||
// calling Equal on all non-ignored slice or array elements report equal.
|
||||
|
|
@ -80,6 +80,11 @@ import (
|
|||
// Pointers and interfaces are equal if they are both nil or both non-nil,
|
||||
// where they have the same underlying concrete type and recursively
|
||||
// calling Equal on the underlying values reports equal.
|
||||
//
|
||||
// Before recursing into a pointer, slice element, or map, the current path
|
||||
// is checked to detect whether the address has already been visited.
|
||||
// If there is a cycle, then the pointed at values are considered equal
|
||||
// only if both addresses were previously visited in the same path step.
|
||||
func Equal(x, y interface{}, opts ...Option) bool {
|
||||
vx := reflect.ValueOf(x)
|
||||
vy := reflect.ValueOf(y)
|
||||
|
|
@ -137,6 +142,7 @@ type state struct {
|
|||
// Calling statelessCompare must not result in observable changes to these.
|
||||
result diff.Result // The current result of comparison
|
||||
curPath Path // The current path in the value tree
|
||||
curPtrs pointerPath // The current set of visited pointers
|
||||
reporters []reporter // Optional reporters
|
||||
|
||||
// recChecker checks for infinite cycles applying the same set of
|
||||
|
|
@ -148,13 +154,14 @@ type state struct {
|
|||
dynChecker dynChecker
|
||||
|
||||
// These fields, once set by processOption, will not change.
|
||||
exporters map[reflect.Type]bool // Set of structs with unexported field visibility
|
||||
opts Options // List of all fundamental and filter options
|
||||
exporters []exporter // List of exporters for structs with unexported fields
|
||||
opts Options // List of all fundamental and filter options
|
||||
}
|
||||
|
||||
func newState(opts []Option) *state {
|
||||
// Always ensure a validator option exists to validate the inputs.
|
||||
s := &state{opts: Options{validator{}}}
|
||||
s.curPtrs.Init()
|
||||
s.processOption(Options(opts))
|
||||
return s
|
||||
}
|
||||
|
|
@ -174,13 +181,8 @@ func (s *state) processOption(opt Option) {
|
|||
panic(fmt.Sprintf("cannot use an unfiltered option: %v", opt))
|
||||
}
|
||||
s.opts = append(s.opts, opt)
|
||||
case visibleStructs:
|
||||
if s.exporters == nil {
|
||||
s.exporters = make(map[reflect.Type]bool)
|
||||
}
|
||||
for t := range opt {
|
||||
s.exporters[t] = true
|
||||
}
|
||||
case exporter:
|
||||
s.exporters = append(s.exporters, opt)
|
||||
case reporter:
|
||||
s.reporters = append(s.reporters, opt)
|
||||
default:
|
||||
|
|
@ -192,9 +194,9 @@ func (s *state) processOption(opt Option) {
|
|||
// This function is stateless in that it does not alter the current result,
|
||||
// or output to any registered reporters.
|
||||
func (s *state) statelessCompare(step PathStep) diff.Result {
|
||||
// We do not save and restore the curPath because all of the compareX
|
||||
// methods should properly push and pop from the path.
|
||||
// It is an implementation bug if the contents of curPath differs from
|
||||
// We do not save and restore curPath and curPtrs because all of the
|
||||
// compareX methods should properly push and pop from them.
|
||||
// It is an implementation bug if the contents of the paths differ from
|
||||
// when calling this function to when returning from it.
|
||||
|
||||
oldResult, oldReporters := s.result, s.reporters
|
||||
|
|
@ -216,9 +218,17 @@ func (s *state) compareAny(step PathStep) {
|
|||
}
|
||||
s.recChecker.Check(s.curPath)
|
||||
|
||||
// Obtain the current type and values.
|
||||
// Cycle-detection for slice elements (see NOTE in compareSlice).
|
||||
t := step.Type()
|
||||
vx, vy := step.Values()
|
||||
if si, ok := step.(SliceIndex); ok && si.isSlice && vx.IsValid() && vy.IsValid() {
|
||||
px, py := vx.Addr(), vy.Addr()
|
||||
if eq, visited := s.curPtrs.Push(px, py); visited {
|
||||
s.report(eq, reportByCycle)
|
||||
return
|
||||
}
|
||||
defer s.curPtrs.Pop(px, py)
|
||||
}
|
||||
|
||||
// Rule 1: Check whether an option applies on this node in the value tree.
|
||||
if s.tryOptions(t, vx, vy) {
|
||||
|
|
@ -354,6 +364,7 @@ func sanitizeValue(v reflect.Value, t reflect.Type) reflect.Value {
|
|||
func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) {
|
||||
var vax, vay reflect.Value // Addressable versions of vx and vy
|
||||
|
||||
var mayForce, mayForceInit bool
|
||||
step := StructField{&structField{}}
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
step.typ = t.Field(i).Type
|
||||
|
|
@ -375,7 +386,13 @@ func (s *state) compareStruct(t reflect.Type, vx, vy reflect.Value) {
|
|||
vax = makeAddressable(vx)
|
||||
vay = makeAddressable(vy)
|
||||
}
|
||||
step.mayForce = s.exporters[t]
|
||||
if !mayForceInit {
|
||||
for _, xf := range s.exporters {
|
||||
mayForce = mayForce || xf(t)
|
||||
}
|
||||
mayForceInit = true
|
||||
}
|
||||
step.mayForce = mayForce
|
||||
step.pvx = vax
|
||||
step.pvy = vay
|
||||
step.field = t.Field(i)
|
||||
|
|
@ -391,9 +408,21 @@ func (s *state) compareSlice(t reflect.Type, vx, vy reflect.Value) {
|
|||
return
|
||||
}
|
||||
|
||||
// TODO: Support cyclic data structures.
|
||||
// NOTE: It is incorrect to call curPtrs.Push on the slice header pointer
|
||||
// since a slice represents a list of pointers rather than a single pointer.
|
||||
// The pointer checking logic must be handled on a per-element basis
|
||||
// in compareAny.
|
||||
//
|
||||
// A slice header (see reflect.SliceHeader) in Go is a tuple of a starting
|
||||
// pointer P, a length N, and a capacity C. Supposing each slice element has
|
||||
// a memory size of M, then the slice is equivalent to the list of pointers:
|
||||
// [P+i*M for i in range(N)]
|
||||
//
|
||||
// For example, v[:0] and v[:1] are slices with the same starting pointer,
|
||||
// but they are clearly different values. Using the slice pointer alone
|
||||
// violates the assumption that equal pointers implies equal values.
|
||||
|
||||
step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}}}
|
||||
step := SliceIndex{&sliceIndex{pathStep: pathStep{typ: t.Elem()}, isSlice: isSlice}}
|
||||
withIndexes := func(ix, iy int) SliceIndex {
|
||||
if ix >= 0 {
|
||||
step.vx, step.xkey = vx.Index(ix), ix
|
||||
|
|
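A tiny illustration of the point made in the NOTE above (illustrative only, not vendored code): two slices can share a starting pointer yet be different values, which is why the cycle check has to run per element rather than on the slice header:

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

func main() {
	v := []int{1}
	// v[:0] and v[:1] have the same starting pointer but different lengths,
	// so keying cycle detection on the slice pointer alone would be wrong.
	fmt.Println(cmp.Equal(v[:0], v[:1])) // false
}
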
@ -470,7 +499,12 @@ func (s *state) compareMap(t reflect.Type, vx, vy reflect.Value) {
|
|||
return
|
||||
}
|
||||
|
||||
// TODO: Support cyclic data structures.
|
||||
// Cycle-detection for maps.
|
||||
if eq, visited := s.curPtrs.Push(vx, vy); visited {
|
||||
s.report(eq, reportByCycle)
|
||||
return
|
||||
}
|
||||
defer s.curPtrs.Pop(vx, vy)
|
||||
|
||||
// We combine and sort the two map keys so that we can perform the
|
||||
// comparisons in a deterministic order.
|
||||
|
|
@ -507,7 +541,12 @@ func (s *state) comparePtr(t reflect.Type, vx, vy reflect.Value) {
|
|||
return
|
||||
}
|
||||
|
||||
// TODO: Support cyclic data structures.
|
||||
// Cycle-detection for pointers.
|
||||
if eq, visited := s.curPtrs.Push(vx, vy); visited {
|
||||
s.report(eq, reportByCycle)
|
||||
return
|
||||
}
|
||||
defer s.curPtrs.Pop(vx, vy)
|
||||
|
||||
vx, vy = vx.Elem(), vy.Elem()
|
||||
s.compareAny(Indirect{&indirect{pathStep{t.Elem(), vx, vy}}})
|
||||
|
|
|
|||
|
|
@ -8,8 +8,8 @@ package cmp
|
|||
|
||||
import "reflect"
|
||||
|
||||
const supportAllowUnexported = false
|
||||
const supportExporters = false
|
||||
|
||||
func retrieveUnexportedField(reflect.Value, reflect.StructField) reflect.Value {
|
||||
panic("retrieveUnexportedField is not implemented")
|
||||
panic("no support for forcibly accessing unexported fields")
|
||||
}
|
||||
|
|
|
|||
|
|
@ -11,7 +11,7 @@ import (
|
|||
"unsafe"
|
||||
)
|
||||
|
||||
const supportAllowUnexported = true
|
||||
const supportExporters = true
|
||||
|
||||
// retrieveUnexportedField uses unsafe to forcibly retrieve any field from
|
||||
// a struct such that the value has read-write permissions.
|
||||
|
|
@ -19,5 +19,7 @@ const supportAllowUnexported = true
|
|||
// The parent struct, v, must be addressable, while f must be a StructField
|
||||
// describing the field to retrieve.
|
||||
func retrieveUnexportedField(v reflect.Value, f reflect.StructField) reflect.Value {
|
||||
return reflect.NewAt(f.Type, unsafe.Pointer(v.UnsafeAddr()+f.Offset)).Elem()
|
||||
// See https://github.com/google/go-cmp/issues/167 for discussion of the
|
||||
// following expression.
|
||||
return reflect.NewAt(f.Type, unsafe.Pointer(uintptr(unsafe.Pointer(v.UnsafeAddr()))+f.Offset)).Elem()
|
||||
}
|
||||
|
|
|
|||
|
|
@ -225,8 +225,20 @@ func (validator) apply(s *state, vx, vy reflect.Value) {
|
|||
|
||||
// Unable to Interface implies unexported field without visibility access.
|
||||
if !vx.CanInterface() || !vy.CanInterface() {
|
||||
const help = "consider using a custom Comparer; if you control the implementation of type, you can also consider AllowUnexported or cmpopts.IgnoreUnexported"
|
||||
panic(fmt.Sprintf("cannot handle unexported field: %#v\n%s", s.curPath, help))
|
||||
const help = "consider using a custom Comparer; if you control the implementation of type, you can also consider using an Exporter, AllowUnexported, or cmpopts.IgnoreUnexported"
|
||||
var name string
|
||||
if t := s.curPath.Index(-2).Type(); t.Name() != "" {
|
||||
// Named type with unexported fields.
|
||||
name = fmt.Sprintf("%q.%v", t.PkgPath(), t.Name()) // e.g., "path/to/package".MyType
|
||||
} else {
|
||||
// Unnamed type with unexported fields. Derive PkgPath from field.
|
||||
var pkgPath string
|
||||
for i := 0; i < t.NumField() && pkgPath == ""; i++ {
|
||||
pkgPath = t.Field(i).PkgPath
|
||||
}
|
||||
name = fmt.Sprintf("%q.(%v)", pkgPath, t.String()) // e.g., "path/to/package".(struct { a int })
|
||||
}
|
||||
panic(fmt.Sprintf("cannot handle unexported field at %#v:\n\t%v\n%s", s.curPath, name, help))
|
||||
}
|
||||
|
||||
panic("not reachable")
|
||||
|
|
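For context (illustrative only): the improved panic message above fires when comparing structs with unexported fields and no matching option. The config type below is hypothetical and just shows the two usual escape hatches:

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
	"github.com/google/go-cmp/cmp/cmpopts"
)

// config is a hypothetical type with an unexported field.
type config struct {
	Name string
	salt string
}

func main() {
	x := config{Name: "a", salt: "1"}
	y := config{Name: "a", salt: "2"}

	// cmp.Equal(x, y) with no options would panic with the message built above,
	// naming the config type as the one carrying unexported fields.
	fmt.Println(cmp.Equal(x, y, cmpopts.IgnoreUnexported(config{}))) // true: salt is ignored
	fmt.Println(cmp.Equal(x, y, cmp.AllowUnexported(config{})))      // false: salt differs
}
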
@ -360,9 +372,8 @@ func (cm comparer) String() string {
|
|||
return fmt.Sprintf("Comparer(%s)", function.NameOf(cm.fnc))
|
||||
}
|
||||
|
||||
// AllowUnexported returns an Option that forcibly allows operations on
|
||||
// unexported fields in certain structs, which are specified by passing in a
|
||||
// value of each struct type.
|
||||
// Exporter returns an Option that specifies whether Equal is allowed to
|
||||
// introspect into the unexported fields of certain struct types.
|
||||
//
|
||||
// Users of this option must understand that comparing on unexported fields
|
||||
// from external packages is not safe since changes in the internal
|
||||
|
|
@ -386,10 +397,24 @@ func (cm comparer) String() string {
|
|||
//
|
||||
// In other cases, the cmpopts.IgnoreUnexported option can be used to ignore
|
||||
// all unexported fields on specified struct types.
|
||||
func AllowUnexported(types ...interface{}) Option {
|
||||
if !supportAllowUnexported {
|
||||
panic("AllowUnexported is not supported on purego builds, Google App Engine Standard, or GopherJS")
|
||||
func Exporter(f func(reflect.Type) bool) Option {
|
||||
if !supportExporters {
|
||||
panic("Exporter is not supported on purego builds")
|
||||
}
|
||||
return exporter(f)
|
||||
}
|
||||
|
||||
type exporter func(reflect.Type) bool
|
||||
|
||||
func (exporter) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption {
|
||||
panic("not implemented")
|
||||
}
|
||||
|
||||
// AllowUnexported returns an Options that allows Equal to forcibly introspect
|
||||
// unexported fields of the specified struct types.
|
||||
//
|
||||
// See Exporter for the proper use of this option.
|
||||
func AllowUnexported(types ...interface{}) Option {
|
||||
m := make(map[reflect.Type]bool)
|
||||
for _, typ := range types {
|
||||
t := reflect.TypeOf(typ)
|
||||
|
|
@ -398,13 +423,7 @@ func AllowUnexported(types ...interface{}) Option {
|
|||
}
|
||||
m[t] = true
|
||||
}
|
||||
return visibleStructs(m)
|
||||
}
|
||||
|
||||
type visibleStructs map[reflect.Type]bool
|
||||
|
||||
func (visibleStructs) filter(_ *state, _ reflect.Type, _, _ reflect.Value) applicableOption {
|
||||
panic("not implemented")
|
||||
return exporter(func(t reflect.Type) bool { return m[t] })
|
||||
}
|
||||
|
||||
// Result represents the comparison result for a single node and
|
||||
|
|
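A minimal sketch of the new Exporter option in use (the inner and outer types are hypothetical, not from the diff):

package main

import (
	"fmt"
	"reflect"

	"github.com/google/go-cmp/cmp"
)

// inner and outer are hypothetical types used only for illustration.
type inner struct{ n int }
type outer struct{ In inner }

func main() {
	x := outer{In: inner{n: 1}}
	y := outer{In: inner{n: 1}}

	// Exporter generalizes AllowUnexported: any predicate over reflect.Type can
	// opt struct types into unexported-field comparison.
	opt := cmp.Exporter(func(t reflect.Type) bool {
		return t == reflect.TypeOf(inner{})
	})
	fmt.Println(cmp.Equal(x, y, opt)) // true
}
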
@ -436,6 +455,11 @@ func (r Result) ByFunc() bool {
|
|||
return r.flags&reportByFunc != 0
|
||||
}
|
||||
|
||||
// ByCycle reports whether a reference cycle was detected.
|
||||
func (r Result) ByCycle() bool {
|
||||
return r.flags&reportByCycle != 0
|
||||
}
|
||||
|
||||
type resultFlags uint
|
||||
|
||||
const (
|
||||
|
|
@ -446,6 +470,7 @@ const (
|
|||
reportByIgnore
|
||||
reportByMethod
|
||||
reportByFunc
|
||||
reportByCycle
|
||||
)
|
||||
|
||||
// Reporter is an Option that can be passed to Equal. When Equal traverses
|
||||
|
|
|
|||
|
|
@ -10,6 +10,8 @@ import (
|
|||
"strings"
|
||||
"unicode"
|
||||
"unicode/utf8"
|
||||
|
||||
"github.com/google/go-cmp/cmp/internal/value"
|
||||
)
|
||||
|
||||
// Path is a list of PathSteps describing the sequence of operations to get
|
||||
|
|
@ -41,7 +43,7 @@ type PathStep interface {
|
|||
// In some cases, one or both may be invalid or have restrictions:
|
||||
// • For StructField, both are not interface-able if the current field
|
||||
// is unexported and the struct type is not explicitly permitted by
|
||||
// AllowUnexported to traverse unexported fields.
|
||||
// an Exporter to traverse unexported fields.
|
||||
// • For SliceIndex, one may be invalid if an element is missing from
|
||||
// either the x or y slice.
|
||||
// • For MapIndex, one may be invalid if an entry is missing from
|
||||
|
|
@ -207,6 +209,7 @@ type SliceIndex struct{ *sliceIndex }
|
|||
type sliceIndex struct {
|
||||
pathStep
|
||||
xkey, ykey int
|
||||
isSlice bool // False for reflect.Array
|
||||
}
|
||||
|
||||
func (si SliceIndex) Type() reflect.Type { return si.typ }
|
||||
|
|
@ -301,6 +304,72 @@ func (tf Transform) Func() reflect.Value { return tf.trans.fnc }
|
|||
// The == operator can be used to detect the exact option used.
|
||||
func (tf Transform) Option() Option { return tf.trans }
|
||||
|
||||
// pointerPath represents a dual-stack of pointers encountered when
|
||||
// recursively traversing the x and y values. This data structure supports
|
||||
// detection of cycles and determining whether the cycles are equal.
|
||||
// In Go, cycles can occur via pointers, slices, and maps.
|
||||
//
|
||||
// The pointerPath uses a map to represent a stack; where descension into a
|
||||
// pointer pushes the address onto the stack, and ascension from a pointer
|
||||
// pops the address from the stack. Thus, when traversing into a pointer from
|
||||
// reflect.Ptr, reflect.Slice element, or reflect.Map, we can detect cycles
|
||||
// by checking whether the pointer has already been visited. The cycle detection
|
||||
// uses a separate stack for the x and y values.
|
||||
//
|
||||
// If a cycle is detected we need to determine whether the two pointers
|
||||
// should be considered equal. The definition of equality chosen by Equal
|
||||
// requires two graphs to have the same structure. To determine this, both the
|
||||
// x and y values must have a cycle where the previous pointers were also
|
||||
// encountered together as a pair.
|
||||
//
|
||||
// Semantically, this is equivalent to augmenting Indirect, SliceIndex, and
|
||||
// MapIndex with pointer information for the x and y values.
|
||||
// Suppose px and py are two pointers to compare, we then search the
|
||||
// Path for whether px was ever encountered in the Path history of x, and
|
||||
// similarly so with py. If either side has a cycle, the comparison is only
|
||||
// equal if both px and py have a cycle resulting from the same PathStep.
|
||||
//
|
||||
// Using a map as a stack is more performant as we can perform cycle detection
|
||||
// in O(1) instead of O(N) where N is len(Path).
|
||||
type pointerPath struct {
|
||||
// mx is keyed by x pointers, where the value is the associated y pointer.
|
||||
mx map[value.Pointer]value.Pointer
|
||||
// my is keyed by y pointers, where the value is the associated x pointer.
|
||||
my map[value.Pointer]value.Pointer
|
||||
}
|
||||
|
||||
func (p *pointerPath) Init() {
|
||||
p.mx = make(map[value.Pointer]value.Pointer)
|
||||
p.my = make(map[value.Pointer]value.Pointer)
|
||||
}
|
||||
|
||||
// Push indicates intent to descend into pointers vx and vy where
|
||||
// visited reports whether either has been seen before. If visited before,
|
||||
// equal reports whether both pointers were encountered together.
|
||||
// Pop must be called if and only if the pointers were never visited.
|
||||
//
|
||||
// The pointers vx and vy must be a reflect.Ptr, reflect.Slice, or reflect.Map
|
||||
// and be non-nil.
|
||||
func (p pointerPath) Push(vx, vy reflect.Value) (equal, visited bool) {
|
||||
px := value.PointerOf(vx)
|
||||
py := value.PointerOf(vy)
|
||||
_, ok1 := p.mx[px]
|
||||
_, ok2 := p.my[py]
|
||||
if ok1 || ok2 {
|
||||
equal = p.mx[px] == py && p.my[py] == px // Pointers paired together
|
||||
return equal, true
|
||||
}
|
||||
p.mx[px] = py
|
||||
p.my[py] = px
|
||||
return false, false
|
||||
}
|
||||
|
||||
// Pop ascends from pointers vx and vy.
|
||||
func (p pointerPath) Pop(vx, vy reflect.Value) {
|
||||
delete(p.mx, value.PointerOf(vx))
|
||||
delete(p.my, value.PointerOf(vy))
|
||||
}
|
||||
|
||||
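A small illustration of the cycle handling that pointerPath enables (the node type and ring helper are hypothetical, used only to show the behavior):

package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

// node is a hypothetical element of a circular singly linked list.
type node struct {
	Value int
	Next  *node
}

func ring(vals ...int) *node {
	// Build a circular list: the last element points back to the first.
	first := &node{Value: vals[0]}
	cur := first
	for _, v := range vals[1:] {
		cur.Next = &node{Value: v}
		cur = cur.Next
	}
	cur.Next = first
	return first
}

func main() {
	// With the pointerPath bookkeeping above, Equal terminates on cyclic values
	// and reports equality only when both cycles have the same structure.
	fmt.Println(cmp.Equal(ring(1, 2, 3), ring(1, 2, 3))) // true
	fmt.Println(cmp.Equal(ring(1, 2, 3), ring(1, 2)))    // false
}
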
// isExported reports whether the identifier is exported.
|
||||
func isExported(id string) bool {
|
||||
r, _ := utf8.DecodeRuneInString(id)
|
||||
|
|
|
|||
|
|
@ -3,7 +3,7 @@ gofuzz
|
|||
|
||||
gofuzz is a library for populating go objects with random values.
|
||||
|
||||
[](https://godoc.org/github.com/google/gofuzz)
|
||||
[](https://godoc.org/github.com/google/gofuzz)
|
||||
[](https://travis-ci.org/google/gofuzz)
|
||||
|
||||
This is useful for testing:
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ import (
|
|||
"fmt"
|
||||
"math/rand"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"time"
|
||||
)
|
||||
|
||||
|
|
@ -28,13 +29,14 @@ type fuzzFuncMap map[reflect.Type]reflect.Value
|
|||
|
||||
// Fuzzer knows how to fill any object with random fields.
|
||||
type Fuzzer struct {
|
||||
fuzzFuncs fuzzFuncMap
|
||||
defaultFuzzFuncs fuzzFuncMap
|
||||
r *rand.Rand
|
||||
nilChance float64
|
||||
minElements int
|
||||
maxElements int
|
||||
maxDepth int
|
||||
fuzzFuncs fuzzFuncMap
|
||||
defaultFuzzFuncs fuzzFuncMap
|
||||
r *rand.Rand
|
||||
nilChance float64
|
||||
minElements int
|
||||
maxElements int
|
||||
maxDepth int
|
||||
skipFieldPatterns []*regexp.Regexp
|
||||
}
|
||||
|
||||
// New returns a new Fuzzer. Customize your Fuzzer further by calling Funcs,
|
||||
|
|
@ -150,6 +152,13 @@ func (f *Fuzzer) MaxDepth(d int) *Fuzzer {
|
|||
return f
|
||||
}
|
||||
|
||||
// Skip fields which match the supplied pattern. Call this multiple times if needed.
// This is useful to skip XXX_ fields generated by protobuf.
|
||||
func (f *Fuzzer) SkipFieldsWithPattern(pattern *regexp.Regexp) *Fuzzer {
|
||||
f.skipFieldPatterns = append(f.skipFieldPatterns, pattern)
|
||||
return f
|
||||
}
|
||||
|
||||
// Fuzz recursively fills all of obj's fields with something random. First
|
||||
// this tries to find a custom fuzz function (see Funcs). If there is no
|
||||
// custom function this tests whether the object implements fuzz.Interface and,
|
||||
|
|
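A minimal sketch of the new SkipFieldsWithPattern option (the msg struct is a hypothetical stand-in for a generated protobuf message):

package main

import (
	"fmt"
	"regexp"

	fuzz "github.com/google/gofuzz"
)

// msg is a hypothetical stand-in for a generated protobuf message.
type msg struct {
	Name             string
	XXX_sizecache    int32
	XXX_unrecognized []byte
}

func main() {
	f := fuzz.New().NilChance(0).SkipFieldsWithPattern(regexp.MustCompile(`^XXX_`))
	var m msg
	f.Fuzz(&m)
	// Name is randomized; the XXX_ bookkeeping fields keep their zero values.
	fmt.Printf("%+v\n", m)
}
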
@ -274,7 +283,17 @@ func (fc *fuzzerContext) doFuzz(v reflect.Value, flags uint64) {
|
|||
v.Set(reflect.Zero(v.Type()))
|
||||
case reflect.Struct:
|
||||
for i := 0; i < v.NumField(); i++ {
|
||||
fc.doFuzz(v.Field(i), 0)
|
||||
skipField := false
|
||||
fieldName := v.Type().Field(i).Name
|
||||
for _, pattern := range fc.fuzzer.skipFieldPatterns {
|
||||
if pattern.MatchString(fieldName) {
|
||||
skipField = true
|
||||
break
|
||||
}
|
||||
}
|
||||
if !skipField {
|
||||
fc.doFuzz(v.Field(i), 0)
|
||||
}
|
||||
}
|
||||
case reflect.Chan:
|
||||
fallthrough
|
||||
|
|
|
|||
|
|
@ -7105,15 +7105,15 @@ func (m *Any) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of ApiKeySecurity suitable for JSON or YAML export.
|
||||
func (m *ApiKeySecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
if m.In != "" {
|
||||
info = append(info, yaml.MapItem{Key: "in", Value: m.In})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "in", Value: m.In})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
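The regenerated ToRawInfo methods now start with a nil-receiver guard and always emit required keys instead of gating them on zero-value checks. One illustrative consequence (assuming the vendored import path github.com/googleapis/gnostic/OpenAPIv2):

package main

import (
	"fmt"

	openapi_v2 "github.com/googleapis/gnostic/OpenAPIv2"
)

func main() {
	// With the nil guards added above, calling ToRawInfo on a nil message
	// returns an empty yaml.MapSlice instead of dereferencing nil.
	var doc *openapi_v2.Document
	fmt.Println(doc.ToRawInfo()) // []
}
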
@ -7129,9 +7129,11 @@ func (m *ApiKeySecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of BasicAuthenticationSecurity suitable for JSON or YAML export.
|
||||
func (m *BasicAuthenticationSecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7147,21 +7149,21 @@ func (m *BasicAuthenticationSecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of BodyParameter suitable for JSON or YAML export.
|
||||
func (m *BodyParameter) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
if m.In != "" {
|
||||
info = append(info, yaml.MapItem{Key: "in", Value: m.In})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "in", Value: m.In})
|
||||
if m.Required != false {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
}
|
||||
if m.Schema != nil {
|
||||
info = append(info, yaml.MapItem{Key: "schema", Value: m.Schema.ToRawInfo()})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "schema", Value: m.Schema.ToRawInfo()})
|
||||
// &{Name:schema Type:Schema StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.VendorExtension != nil {
|
||||
for _, item := range m.VendorExtension {
|
||||
|
|
@ -7175,6 +7177,9 @@ func (m *BodyParameter) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Contact suitable for JSON or YAML export.
|
||||
func (m *Contact) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7196,6 +7201,9 @@ func (m *Contact) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Default suitable for JSON or YAML export.
|
||||
func (m *Default) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -7208,6 +7216,9 @@ func (m *Default) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Definitions suitable for JSON or YAML export.
|
||||
func (m *Definitions) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -7220,12 +7231,13 @@ func (m *Definitions) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Document suitable for JSON or YAML export.
|
||||
func (m *Document) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Swagger != "" {
|
||||
info = append(info, yaml.MapItem{Key: "swagger", Value: m.Swagger})
|
||||
}
|
||||
if m.Info != nil {
|
||||
info = append(info, yaml.MapItem{Key: "info", Value: m.Info.ToRawInfo()})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "swagger", Value: m.Swagger})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "info", Value: m.Info.ToRawInfo()})
|
||||
// &{Name:info Type:Info StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.Host != "" {
|
||||
info = append(info, yaml.MapItem{Key: "host", Value: m.Host})
|
||||
|
|
@ -7242,9 +7254,8 @@ func (m *Document) ToRawInfo() interface{} {
|
|||
if len(m.Produces) != 0 {
|
||||
info = append(info, yaml.MapItem{Key: "produces", Value: m.Produces})
|
||||
}
|
||||
if m.Paths != nil {
|
||||
info = append(info, yaml.MapItem{Key: "paths", Value: m.Paths.ToRawInfo()})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "paths", Value: m.Paths.ToRawInfo()})
|
||||
// &{Name:paths Type:Paths StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.Definitions != nil {
|
||||
info = append(info, yaml.MapItem{Key: "definitions", Value: m.Definitions.ToRawInfo()})
|
||||
|
|
@ -7294,6 +7305,9 @@ func (m *Document) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Examples suitable for JSON or YAML export.
|
||||
func (m *Examples) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -7306,12 +7320,14 @@ func (m *Examples) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of ExternalDocs suitable for JSON or YAML export.
|
||||
func (m *ExternalDocs) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
if m.Url != "" {
|
||||
info = append(info, yaml.MapItem{Key: "url", Value: m.Url})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "url", Value: m.Url})
|
||||
if m.VendorExtension != nil {
|
||||
for _, item := range m.VendorExtension {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -7324,6 +7340,9 @@ func (m *ExternalDocs) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of FileSchema suitable for JSON or YAML export.
|
||||
func (m *FileSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Format != "" {
|
||||
info = append(info, yaml.MapItem{Key: "format", Value: m.Format})
|
||||
}
|
||||
|
|
@ -7340,9 +7359,8 @@ func (m *FileSchema) ToRawInfo() interface{} {
|
|||
if len(m.Required) != 0 {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
if m.ReadOnly != false {
|
||||
info = append(info, yaml.MapItem{Key: "readOnly", Value: m.ReadOnly})
|
||||
}
|
||||
|
|
@ -7366,6 +7384,9 @@ func (m *FileSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of FormDataParameterSubSchema suitable for JSON or YAML export.
|
||||
func (m *FormDataParameterSubSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Required != false {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
}
|
||||
|
|
@ -7451,9 +7472,11 @@ func (m *FormDataParameterSubSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Header suitable for JSON or YAML export.
|
||||
func (m *Header) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
if m.Format != "" {
|
||||
info = append(info, yaml.MapItem{Key: "format", Value: m.Format})
|
||||
}
|
||||
|
|
@ -7524,6 +7547,9 @@ func (m *Header) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of HeaderParameterSubSchema suitable for JSON or YAML export.
|
||||
func (m *HeaderParameterSubSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Required != false {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
}
|
||||
|
|
@ -7606,6 +7632,9 @@ func (m *HeaderParameterSubSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Headers suitable for JSON or YAML export.
|
||||
func (m *Headers) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -7618,12 +7647,13 @@ func (m *Headers) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Info suitable for JSON or YAML export.
|
||||
func (m *Info) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Title != "" {
|
||||
info = append(info, yaml.MapItem{Key: "title", Value: m.Title})
|
||||
}
|
||||
if m.Version != "" {
|
||||
info = append(info, yaml.MapItem{Key: "version", Value: m.Version})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "title", Value: m.Title})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "version", Value: m.Version})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7650,6 +7680,9 @@ func (m *Info) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of ItemsItem suitable for JSON or YAML export.
|
||||
func (m *ItemsItem) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if len(m.Schema) != 0 {
|
||||
items := make([]interface{}, 0)
|
||||
for _, item := range m.Schema {
|
||||
|
|
@ -7664,9 +7697,11 @@ func (m *ItemsItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of JsonReference suitable for JSON or YAML export.
|
||||
func (m *JsonReference) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.XRef != "" {
|
||||
info = append(info, yaml.MapItem{Key: "$ref", Value: m.XRef})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "$ref", Value: m.XRef})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7676,9 +7711,11 @@ func (m *JsonReference) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of License suitable for JSON or YAML export.
|
||||
func (m *License) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
if m.Url != "" {
|
||||
info = append(info, yaml.MapItem{Key: "url", Value: m.Url})
|
||||
}
|
||||
|
|
@ -7694,6 +7731,9 @@ func (m *License) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedAny suitable for JSON or YAML export.
|
||||
func (m *NamedAny) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7704,6 +7744,9 @@ func (m *NamedAny) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedHeader suitable for JSON or YAML export.
|
||||
func (m *NamedHeader) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7714,6 +7757,9 @@ func (m *NamedHeader) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedParameter suitable for JSON or YAML export.
|
||||
func (m *NamedParameter) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7724,6 +7770,9 @@ func (m *NamedParameter) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedPathItem suitable for JSON or YAML export.
|
||||
func (m *NamedPathItem) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7734,6 +7783,9 @@ func (m *NamedPathItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedResponse suitable for JSON or YAML export.
|
||||
func (m *NamedResponse) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7744,6 +7796,9 @@ func (m *NamedResponse) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedResponseValue suitable for JSON or YAML export.
|
||||
func (m *NamedResponseValue) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7754,6 +7809,9 @@ func (m *NamedResponseValue) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedSchema suitable for JSON or YAML export.
|
||||
func (m *NamedSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7764,6 +7822,9 @@ func (m *NamedSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedSecurityDefinitionsItem suitable for JSON or YAML export.
|
||||
func (m *NamedSecurityDefinitionsItem) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7774,6 +7835,9 @@ func (m *NamedSecurityDefinitionsItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedString suitable for JSON or YAML export.
|
||||
func (m *NamedString) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7786,6 +7850,9 @@ func (m *NamedString) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of NamedStringArray suitable for JSON or YAML export.
|
||||
func (m *NamedStringArray) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
|
|
@ -7823,22 +7890,21 @@ func (m *NonBodyParameter) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Oauth2AccessCodeSecurity suitable for JSON or YAML export.
|
||||
func (m *Oauth2AccessCodeSecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
if m.Flow != "" {
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m.Scopes != nil {
|
||||
info = append(info, yaml.MapItem{Key: "scopes", Value: m.Scopes.ToRawInfo()})
|
||||
}
|
||||
// &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.AuthorizationUrl != "" {
|
||||
info = append(info, yaml.MapItem{Key: "authorizationUrl", Value: m.AuthorizationUrl})
|
||||
}
|
||||
if m.TokenUrl != "" {
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "authorizationUrl", Value: m.AuthorizationUrl})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7854,19 +7920,19 @@ func (m *Oauth2AccessCodeSecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Oauth2ApplicationSecurity suitable for JSON or YAML export.
|
||||
func (m *Oauth2ApplicationSecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
if m.Flow != "" {
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m.Scopes != nil {
|
||||
info = append(info, yaml.MapItem{Key: "scopes", Value: m.Scopes.ToRawInfo()})
|
||||
}
|
||||
// &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.TokenUrl != "" {
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7882,19 +7948,19 @@ func (m *Oauth2ApplicationSecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Oauth2ImplicitSecurity suitable for JSON or YAML export.
|
||||
func (m *Oauth2ImplicitSecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
if m.Flow != "" {
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m.Scopes != nil {
|
||||
info = append(info, yaml.MapItem{Key: "scopes", Value: m.Scopes.ToRawInfo()})
|
||||
}
|
||||
// &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.AuthorizationUrl != "" {
|
||||
info = append(info, yaml.MapItem{Key: "authorizationUrl", Value: m.AuthorizationUrl})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "authorizationUrl", Value: m.AuthorizationUrl})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7910,19 +7976,19 @@ func (m *Oauth2ImplicitSecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Oauth2PasswordSecurity suitable for JSON or YAML export.
|
||||
func (m *Oauth2PasswordSecurity) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
if m.Flow != "" {
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "flow", Value: m.Flow})
|
||||
if m.Scopes != nil {
|
||||
info = append(info, yaml.MapItem{Key: "scopes", Value: m.Scopes.ToRawInfo()})
|
||||
}
|
||||
// &{Name:scopes Type:Oauth2Scopes StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if m.TokenUrl != "" {
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "tokenUrl", Value: m.TokenUrl})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -7938,6 +8004,9 @@ func (m *Oauth2PasswordSecurity) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Oauth2Scopes suitable for JSON or YAML export.
|
||||
func (m *Oauth2Scopes) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// &{Name:additionalProperties Type:NamedString StringEnumValues:[] MapType:string Repeated:true Pattern: Implicit:true Description:}
|
||||
return info
|
||||
}
|
||||
|
|
@ -7945,6 +8014,9 @@ func (m *Oauth2Scopes) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Operation suitable for JSON or YAML export.
|
||||
func (m *Operation) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if len(m.Tags) != 0 {
|
||||
info = append(info, yaml.MapItem{Key: "tags", Value: m.Tags})
|
||||
}
|
||||
|
|
@ -7975,9 +8047,8 @@ func (m *Operation) ToRawInfo() interface{} {
|
|||
info = append(info, yaml.MapItem{Key: "parameters", Value: items})
|
||||
}
|
||||
// &{Name:parameters Type:ParametersItem StringEnumValues:[] MapType: Repeated:true Pattern: Implicit:false Description:The parameters needed to send a valid API call.}
|
||||
if m.Responses != nil {
|
||||
info = append(info, yaml.MapItem{Key: "responses", Value: m.Responses.ToRawInfo()})
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "responses", Value: m.Responses.ToRawInfo()})
|
||||
// &{Name:responses Type:Responses StringEnumValues:[] MapType: Repeated:false Pattern: Implicit:false Description:}
|
||||
if len(m.Schemes) != 0 {
|
||||
info = append(info, yaml.MapItem{Key: "schemes", Value: m.Schemes})
|
||||
|
|
@ -8022,6 +8093,9 @@ func (m *Parameter) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of ParameterDefinitions suitable for JSON or YAML export.
|
||||
func (m *ParameterDefinitions) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8051,6 +8125,9 @@ func (m *ParametersItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of PathItem suitable for JSON or YAML export.
|
||||
func (m *PathItem) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.XRef != "" {
|
||||
info = append(info, yaml.MapItem{Key: "$ref", Value: m.XRef})
|
||||
}
|
||||
|
|
@ -8102,9 +8179,11 @@ func (m *PathItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of PathParameterSubSchema suitable for JSON or YAML export.
|
||||
func (m *PathParameterSubSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Required != false {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
if m.In != "" {
|
||||
info = append(info, yaml.MapItem{Key: "in", Value: m.In})
|
||||
}
|
||||
|
|
@ -8184,6 +8263,9 @@ func (m *PathParameterSubSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Paths suitable for JSON or YAML export.
|
||||
func (m *Paths) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.VendorExtension != nil {
|
||||
for _, item := range m.VendorExtension {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8202,6 +8284,9 @@ func (m *Paths) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of PrimitivesItems suitable for JSON or YAML export.
|
||||
func (m *PrimitivesItems) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Type != "" {
|
||||
info = append(info, yaml.MapItem{Key: "type", Value: m.Type})
|
||||
}
|
||||
|
|
@ -8272,6 +8357,9 @@ func (m *PrimitivesItems) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Properties suitable for JSON or YAML export.
|
||||
func (m *Properties) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8284,6 +8372,9 @@ func (m *Properties) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of QueryParameterSubSchema suitable for JSON or YAML export.
|
||||
func (m *QueryParameterSubSchema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Required != false {
|
||||
info = append(info, yaml.MapItem{Key: "required", Value: m.Required})
|
||||
}
|
||||
|
|
@ -8369,9 +8460,11 @@ func (m *QueryParameterSubSchema) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Response suitable for JSON or YAML export.
|
||||
func (m *Response) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
if m.Schema != nil {
|
||||
info = append(info, yaml.MapItem{Key: "schema", Value: m.Schema.ToRawInfo()})
|
||||
}
|
||||
|
|
@ -8396,6 +8489,9 @@ func (m *Response) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of ResponseDefinitions suitable for JSON or YAML export.
|
||||
func (m *ResponseDefinitions) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8425,6 +8521,9 @@ func (m *ResponseValue) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Responses suitable for JSON or YAML export.
|
||||
func (m *Responses) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.ResponseCode != nil {
|
||||
for _, item := range m.ResponseCode {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8443,6 +8542,9 @@ func (m *Responses) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Schema suitable for JSON or YAML export.
|
||||
func (m *Schema) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.XRef != "" {
|
||||
info = append(info, yaml.MapItem{Key: "$ref", Value: m.XRef})
|
||||
}
|
||||
|
|
@ -8588,6 +8690,9 @@ func (m *SchemaItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of SecurityDefinitions suitable for JSON or YAML export.
|
||||
func (m *SecurityDefinitions) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8637,6 +8742,9 @@ func (m *SecurityDefinitionsItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of SecurityRequirement suitable for JSON or YAML export.
|
||||
func (m *SecurityRequirement) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8654,9 +8762,11 @@ func (m *StringArray) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Tag suitable for JSON or YAML export.
|
||||
func (m *Tag) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
// always include this required field.
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
if m.Description != "" {
|
||||
info = append(info, yaml.MapItem{Key: "description", Value: m.Description})
|
||||
}
|
||||
|
|
@ -8676,6 +8786,9 @@ func (m *Tag) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of TypeItem suitable for JSON or YAML export.
|
||||
func (m *TypeItem) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if len(m.Value) != 0 {
|
||||
info = append(info, yaml.MapItem{Key: "value", Value: m.Value})
|
||||
}
|
||||
|
|
@ -8685,6 +8798,9 @@ func (m *TypeItem) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of VendorExtension suitable for JSON or YAML export.
|
||||
func (m *VendorExtension) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.AdditionalProperties != nil {
|
||||
for _, item := range m.AdditionalProperties {
|
||||
info = append(info, yaml.MapItem{Key: item.Name, Value: item.Value.ToRawInfo()})
|
||||
|
|
@ -8697,6 +8813,9 @@ func (m *VendorExtension) ToRawInfo() interface{} {
|
|||
// ToRawInfo returns a description of Xml suitable for JSON or YAML export.
|
||||
func (m *Xml) ToRawInfo() interface{} {
|
||||
info := yaml.MapSlice{}
|
||||
if m == nil {
|
||||
return info
|
||||
}
|
||||
if m.Name != "" {
|
||||
info = append(info, yaml.MapItem{Key: "name", Value: m.Name})
|
||||
}
|
||||
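All of these generated `ToRawInfo` methods share one contract: they return a value that `gopkg.in/yaml.v2` can marshal directly, typically a `yaml.MapSlice`. A minimal sketch of that pattern, using an illustrative stand-in type rather than a real gnostic message:

```go
package main

import (
	"fmt"

	yaml "gopkg.in/yaml.v2"
)

// rawInfoer matches the shape of the generated ToRawInfo methods above.
type rawInfoer interface {
	ToRawInfo() interface{}
}

// tag is a stand-in for a generated message such as *Tag.
type tag struct{ name, description string }

func (t tag) ToRawInfo() interface{} {
	info := yaml.MapSlice{}
	// always include the required field, as the generated code does
	info = append(info, yaml.MapItem{Key: "name", Value: t.name})
	if t.description != "" {
		info = append(info, yaml.MapItem{Key: "description", Value: t.description})
	}
	return info
}

// dumpYAML renders any message exposing ToRawInfo as YAML text.
func dumpYAML(m rawInfoer) (string, error) {
	out, err := yaml.Marshal(m.ToRawInfo())
	return string(out), err
}

func main() {
	s, _ := dumpYAML(tag{name: "pets", description: "Everything about pets"})
	fmt.Print(s)
	// name: pets
	// description: Everything about pets
}
```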
File diff suppressed because it is too large
@ -17,13 +17,14 @@ package compiler
|
|||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"gopkg.in/yaml.v2"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
yaml "gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
var fileCache map[string][]byte
|
||||
|
|
@ -31,6 +32,8 @@ var infoCache map[string]interface{}
|
|||
var count int64
|
||||
|
||||
var verboseReader = false
|
||||
var fileCacheEnable = true
|
||||
var infoCacheEnable = true
|
||||
|
||||
func initializeFileCache() {
|
||||
if fileCache == nil {
|
||||
|
|
@ -44,29 +47,67 @@ func initializeInfoCache() {
|
|||
}
|
||||
}
|
||||
|
||||
func DisableFileCache() {
|
||||
fileCacheEnable = false
|
||||
}
|
||||
|
||||
func DisableInfoCache() {
|
||||
infoCacheEnable = false
|
||||
}
|
||||
|
||||
func RemoveFromFileCache(fileurl string) {
|
||||
if !fileCacheEnable {
|
||||
return
|
||||
}
|
||||
initializeFileCache()
|
||||
delete(fileCache, fileurl)
|
||||
}
|
||||
|
||||
func RemoveFromInfoCache(filename string) {
|
||||
if !infoCacheEnable {
|
||||
return
|
||||
}
|
||||
initializeInfoCache()
|
||||
delete(infoCache, filename)
|
||||
}
|
||||
|
||||
func GetInfoCache() map[string]interface{} {
|
||||
if infoCache == nil {
|
||||
initializeInfoCache()
|
||||
}
|
||||
return infoCache
|
||||
}
|
||||
|
||||
func ClearInfoCache() {
|
||||
infoCache = make(map[string]interface{})
|
||||
}
|
||||
|
||||
// FetchFile gets a specified file from the local filesystem or a remote location.
|
||||
func FetchFile(fileurl string) ([]byte, error) {
|
||||
var bytes []byte
|
||||
initializeFileCache()
|
||||
bytes, ok := fileCache[fileurl]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit %s", fileurl)
|
||||
if fileCacheEnable {
|
||||
bytes, ok := fileCache[fileurl]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit %s", fileurl)
|
||||
}
|
||||
return bytes, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Fetching %s", fileurl)
|
||||
}
|
||||
return bytes, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Fetching %s", fileurl)
|
||||
}
|
||||
response, err := http.Get(fileurl)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer response.Body.Close()
|
||||
if response.StatusCode != 200 {
|
||||
return nil, errors.New(fmt.Sprintf("Error downloading %s: %s", fileurl, response.Status))
|
||||
}
|
||||
defer response.Body.Close()
|
||||
bytes, err = ioutil.ReadAll(response.Body)
|
||||
if err == nil {
|
||||
if fileCacheEnable && err == nil {
|
||||
fileCache[fileurl] = bytes
|
||||
}
|
||||
return bytes, err
|
||||
|
|
@ -95,22 +136,24 @@ func ReadBytesForFile(filename string) ([]byte, error) {
|
|||
// ReadInfoFromBytes unmarshals a file as a yaml.MapSlice.
|
||||
func ReadInfoFromBytes(filename string, bytes []byte) (interface{}, error) {
|
||||
initializeInfoCache()
|
||||
cachedInfo, ok := infoCache[filename]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit info for file %s", filename)
|
||||
if infoCacheEnable {
|
||||
cachedInfo, ok := infoCache[filename]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
log.Printf("Cache hit info for file %s", filename)
|
||||
}
|
||||
return cachedInfo, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for file %s", filename)
|
||||
}
|
||||
return cachedInfo, nil
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for file %s", filename)
|
||||
}
|
||||
var info yaml.MapSlice
|
||||
err := yaml.Unmarshal(bytes, &info)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if len(filename) > 0 {
|
||||
if infoCacheEnable && len(filename) > 0 {
|
||||
infoCache[filename] = info
|
||||
}
|
||||
return info, nil
|
||||
|
|
@ -119,7 +162,7 @@ func ReadInfoFromBytes(filename string, bytes []byte) (interface{}, error) {
|
|||
// ReadInfoForRef reads a file and return the fragment needed to resolve a $ref.
|
||||
func ReadInfoForRef(basefile string, ref string) (interface{}, error) {
|
||||
initializeInfoCache()
|
||||
{
|
||||
if infoCacheEnable {
|
||||
info, ok := infoCache[ref]
|
||||
if ok {
|
||||
if verboseReader {
|
||||
|
|
@ -127,16 +170,20 @@ func ReadInfoForRef(basefile string, ref string) (interface{}, error) {
|
|||
}
|
||||
return info, nil
|
||||
}
|
||||
}
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for ref %s#%s", basefile, ref)
|
||||
if verboseReader {
|
||||
log.Printf("Reading info for ref %s#%s", basefile, ref)
|
||||
}
|
||||
}
|
||||
count = count + 1
|
||||
basedir, _ := filepath.Split(basefile)
|
||||
parts := strings.Split(ref, "#")
|
||||
var filename string
|
||||
if parts[0] != "" {
|
||||
filename = basedir + parts[0]
|
||||
filename = parts[0]
|
||||
if _, err := url.ParseRequestURI(parts[0]); err != nil {
|
||||
// It is not a URL, so the file is local
|
||||
filename = basedir + parts[0]
|
||||
}
|
||||
} else {
|
||||
filename = basefile
|
||||
}
|
||||
|
|
@ -170,6 +217,8 @@ func ReadInfoForRef(basefile string, ref string) (interface{}, error) {
|
|||
}
|
||||
}
|
||||
}
|
||||
infoCache[ref] = info
|
||||
if infoCacheEnable {
|
||||
infoCache[ref] = info
|
||||
}
|
||||
return info, nil
|
||||
}
|
||||
|
|
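Sketch of how the new cache switches might be used from calling code. It relies only on `DisableFileCache`, `DisableInfoCache`, `RemoveFromFileCache` and `FetchFile` from the diff above; the import path is assumed from the vendored location and the URL is a placeholder:

```go
package main

import (
	"log"

	"github.com/googleapis/gnostic/compiler"
)

func main() {
	// Evict one possibly-stale entry so the next FetchFile re-downloads it.
	compiler.RemoveFromFileCache("https://example.com/openapi.yaml")

	data, err := compiler.FetchFile("https://example.com/openapi.yaml")
	if err != nil {
		log.Fatal(err)
	}
	log.Printf("fetched %d bytes", len(data))

	// Long-running tools that must always see fresh documents can opt out
	// of the in-memory caches entirely.
	compiler.DisableFileCache()
	compiler.DisableInfoCache()
}
```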
|
|||
|
|
@ -1,5 +0,0 @@
|
|||
go get github.com/golang/protobuf/protoc-gen-go
|
||||
|
||||
protoc \
|
||||
--go_out=Mgoogle/protobuf/any.proto=github.com/golang/protobuf/ptypes/any:. *.proto
|
||||
|
||||
|
|
@ -1,24 +1,14 @@
|
|||
// Code generated by protoc-gen-go. DO NOT EDIT.
|
||||
// source: extension.proto
|
||||
// source: extensions/extension.proto
|
||||
|
||||
/*
|
||||
Package openapiextension_v1 is a generated protocol buffer package.
|
||||
|
||||
It is generated from these files:
|
||||
extension.proto
|
||||
|
||||
It has these top-level messages:
|
||||
Version
|
||||
ExtensionHandlerRequest
|
||||
ExtensionHandlerResponse
|
||||
Wrapper
|
||||
*/
|
||||
package openapiextension_v1
|
||||
|
||||
import proto "github.com/golang/protobuf/proto"
|
||||
import fmt "fmt"
|
||||
import math "math"
|
||||
import google_protobuf "github.com/golang/protobuf/ptypes/any"
|
||||
import (
|
||||
fmt "fmt"
|
||||
proto "github.com/golang/protobuf/proto"
|
||||
any "github.com/golang/protobuf/ptypes/any"
|
||||
math "math"
|
||||
)
|
||||
|
||||
// Reference imports to suppress errors if they are not otherwise used.
|
||||
var _ = proto.Marshal
|
||||
|
|
@ -29,22 +19,45 @@ var _ = math.Inf
|
|||
// is compatible with the proto package it is being compiled against.
|
||||
// A compilation error at this line likely means your copy of the
|
||||
// proto package needs to be updated.
|
||||
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
|
||||
const _ = proto.ProtoPackageIsVersion3 // please upgrade the proto package
|
||||
|
||||
// The version number of OpenAPI compiler.
|
||||
type Version struct {
|
||||
Major int32 `protobuf:"varint,1,opt,name=major" json:"major,omitempty"`
|
||||
Minor int32 `protobuf:"varint,2,opt,name=minor" json:"minor,omitempty"`
|
||||
Patch int32 `protobuf:"varint,3,opt,name=patch" json:"patch,omitempty"`
|
||||
Major int32 `protobuf:"varint,1,opt,name=major,proto3" json:"major,omitempty"`
|
||||
Minor int32 `protobuf:"varint,2,opt,name=minor,proto3" json:"minor,omitempty"`
|
||||
Patch int32 `protobuf:"varint,3,opt,name=patch,proto3" json:"patch,omitempty"`
|
||||
// A suffix for alpha, beta or rc release, e.g., "alpha-1", "rc2". It should
|
||||
// be empty for mainline stable releases.
|
||||
Suffix string `protobuf:"bytes,4,opt,name=suffix" json:"suffix,omitempty"`
|
||||
Suffix string `protobuf:"bytes,4,opt,name=suffix,proto3" json:"suffix,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Version) Reset() { *m = Version{} }
|
||||
func (m *Version) String() string { return proto.CompactTextString(m) }
|
||||
func (*Version) ProtoMessage() {}
|
||||
func (*Version) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{0} }
|
||||
func (m *Version) Reset() { *m = Version{} }
|
||||
func (m *Version) String() string { return proto.CompactTextString(m) }
|
||||
func (*Version) ProtoMessage() {}
|
||||
func (*Version) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_661e47e790f76671, []int{0}
|
||||
}
|
||||
|
||||
func (m *Version) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Version.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Version) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Version.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Version) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Version.Merge(m, src)
|
||||
}
|
||||
func (m *Version) XXX_Size() int {
|
||||
return xxx_messageInfo_Version.Size(m)
|
||||
}
|
||||
func (m *Version) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Version.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Version proto.InternalMessageInfo
|
||||
|
||||
func (m *Version) GetMajor() int32 {
|
||||
if m != nil {
|
||||
|
|
@ -78,15 +91,38 @@ func (m *Version) GetSuffix() string {
|
|||
type ExtensionHandlerRequest struct {
|
||||
// The OpenAPI descriptions that were explicitly listed on the command line.
|
||||
// The specifications will appear in the order they are specified to gnostic.
|
||||
Wrapper *Wrapper `protobuf:"bytes,1,opt,name=wrapper" json:"wrapper,omitempty"`
|
||||
Wrapper *Wrapper `protobuf:"bytes,1,opt,name=wrapper,proto3" json:"wrapper,omitempty"`
|
||||
// The version number of openapi compiler.
|
||||
CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion" json:"compiler_version,omitempty"`
|
||||
CompilerVersion *Version `protobuf:"bytes,3,opt,name=compiler_version,json=compilerVersion,proto3" json:"compiler_version,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerRequest) Reset() { *m = ExtensionHandlerRequest{} }
|
||||
func (m *ExtensionHandlerRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerRequest) ProtoMessage() {}
|
||||
func (*ExtensionHandlerRequest) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{1} }
|
||||
func (m *ExtensionHandlerRequest) Reset() { *m = ExtensionHandlerRequest{} }
|
||||
func (m *ExtensionHandlerRequest) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerRequest) ProtoMessage() {}
|
||||
func (*ExtensionHandlerRequest) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_661e47e790f76671, []int{1}
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerRequest) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ExtensionHandlerRequest.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ExtensionHandlerRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ExtensionHandlerRequest.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ExtensionHandlerRequest) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ExtensionHandlerRequest.Merge(m, src)
|
||||
}
|
||||
func (m *ExtensionHandlerRequest) XXX_Size() int {
|
||||
return xxx_messageInfo_ExtensionHandlerRequest.Size(m)
|
||||
}
|
||||
func (m *ExtensionHandlerRequest) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ExtensionHandlerRequest.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ExtensionHandlerRequest proto.InternalMessageInfo
|
||||
|
||||
func (m *ExtensionHandlerRequest) GetWrapper() *Wrapper {
|
||||
if m != nil {
|
||||
|
|
@ -105,7 +141,7 @@ func (m *ExtensionHandlerRequest) GetCompilerVersion() *Version {
|
|||
// The extensions writes an encoded ExtensionHandlerResponse to stdout.
|
||||
type ExtensionHandlerResponse struct {
|
||||
// true if the extension is handled by the extension handler; false otherwise
|
||||
Handled bool `protobuf:"varint,1,opt,name=handled" json:"handled,omitempty"`
|
||||
Handled bool `protobuf:"varint,1,opt,name=handled,proto3" json:"handled,omitempty"`
|
||||
// Error message. If non-empty, the extension handling failed.
|
||||
// The extension handler process should exit with status code zero
|
||||
// even if it reports an error in this way.
|
||||
|
|
@ -115,15 +151,38 @@ type ExtensionHandlerResponse struct {
|
|||
// itself -- such as the input Document being unparseable -- should be
|
||||
// reported by writing a message to stderr and exiting with a non-zero
|
||||
// status code.
|
||||
Error []string `protobuf:"bytes,2,rep,name=error" json:"error,omitempty"`
|
||||
Error []string `protobuf:"bytes,2,rep,name=error,proto3" json:"error,omitempty"`
|
||||
// text output
|
||||
Value *google_protobuf.Any `protobuf:"bytes,3,opt,name=value" json:"value,omitempty"`
|
||||
Value *any.Any `protobuf:"bytes,3,opt,name=value,proto3" json:"value,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) Reset() { *m = ExtensionHandlerResponse{} }
|
||||
func (m *ExtensionHandlerResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerResponse) ProtoMessage() {}
|
||||
func (*ExtensionHandlerResponse) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{2} }
|
||||
func (m *ExtensionHandlerResponse) Reset() { *m = ExtensionHandlerResponse{} }
|
||||
func (m *ExtensionHandlerResponse) String() string { return proto.CompactTextString(m) }
|
||||
func (*ExtensionHandlerResponse) ProtoMessage() {}
|
||||
func (*ExtensionHandlerResponse) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_661e47e790f76671, []int{2}
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_ExtensionHandlerResponse.Unmarshal(m, b)
|
||||
}
|
||||
func (m *ExtensionHandlerResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_ExtensionHandlerResponse.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *ExtensionHandlerResponse) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_ExtensionHandlerResponse.Merge(m, src)
|
||||
}
|
||||
func (m *ExtensionHandlerResponse) XXX_Size() int {
|
||||
return xxx_messageInfo_ExtensionHandlerResponse.Size(m)
|
||||
}
|
||||
func (m *ExtensionHandlerResponse) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_ExtensionHandlerResponse.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_ExtensionHandlerResponse proto.InternalMessageInfo
|
||||
|
||||
func (m *ExtensionHandlerResponse) GetHandled() bool {
|
||||
if m != nil {
|
||||
|
|
@ -139,7 +198,7 @@ func (m *ExtensionHandlerResponse) GetError() []string {
|
|||
return nil
|
||||
}
|
||||
|
||||
func (m *ExtensionHandlerResponse) GetValue() *google_protobuf.Any {
|
||||
func (m *ExtensionHandlerResponse) GetValue() *any.Any {
|
||||
if m != nil {
|
||||
return m.Value
|
||||
}
|
||||
|
|
@ -148,17 +207,40 @@ func (m *ExtensionHandlerResponse) GetValue() *google_protobuf.Any {
|
|||
|
||||
type Wrapper struct {
|
||||
// version of the OpenAPI specification in which this extension was written.
|
||||
Version string `protobuf:"bytes,1,opt,name=version" json:"version,omitempty"`
|
||||
Version string `protobuf:"bytes,1,opt,name=version,proto3" json:"version,omitempty"`
|
||||
// Name of the extension
|
||||
ExtensionName string `protobuf:"bytes,2,opt,name=extension_name,json=extensionName" json:"extension_name,omitempty"`
|
||||
ExtensionName string `protobuf:"bytes,2,opt,name=extension_name,json=extensionName,proto3" json:"extension_name,omitempty"`
|
||||
// Must be a valid yaml for the proto
|
||||
Yaml string `protobuf:"bytes,3,opt,name=yaml" json:"yaml,omitempty"`
|
||||
Yaml string `protobuf:"bytes,3,opt,name=yaml,proto3" json:"yaml,omitempty"`
|
||||
XXX_NoUnkeyedLiteral struct{} `json:"-"`
|
||||
XXX_unrecognized []byte `json:"-"`
|
||||
XXX_sizecache int32 `json:"-"`
|
||||
}
|
||||
|
||||
func (m *Wrapper) Reset() { *m = Wrapper{} }
|
||||
func (m *Wrapper) String() string { return proto.CompactTextString(m) }
|
||||
func (*Wrapper) ProtoMessage() {}
|
||||
func (*Wrapper) Descriptor() ([]byte, []int) { return fileDescriptor0, []int{3} }
|
||||
func (m *Wrapper) Reset() { *m = Wrapper{} }
|
||||
func (m *Wrapper) String() string { return proto.CompactTextString(m) }
|
||||
func (*Wrapper) ProtoMessage() {}
|
||||
func (*Wrapper) Descriptor() ([]byte, []int) {
|
||||
return fileDescriptor_661e47e790f76671, []int{3}
|
||||
}
|
||||
|
||||
func (m *Wrapper) XXX_Unmarshal(b []byte) error {
|
||||
return xxx_messageInfo_Wrapper.Unmarshal(m, b)
|
||||
}
|
||||
func (m *Wrapper) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
|
||||
return xxx_messageInfo_Wrapper.Marshal(b, m, deterministic)
|
||||
}
|
||||
func (m *Wrapper) XXX_Merge(src proto.Message) {
|
||||
xxx_messageInfo_Wrapper.Merge(m, src)
|
||||
}
|
||||
func (m *Wrapper) XXX_Size() int {
|
||||
return xxx_messageInfo_Wrapper.Size(m)
|
||||
}
|
||||
func (m *Wrapper) XXX_DiscardUnknown() {
|
||||
xxx_messageInfo_Wrapper.DiscardUnknown(m)
|
||||
}
|
||||
|
||||
var xxx_messageInfo_Wrapper proto.InternalMessageInfo
|
||||
|
||||
func (m *Wrapper) GetVersion() string {
|
||||
if m != nil {
|
||||
|
|
@ -188,31 +270,31 @@ func init() {
|
|||
proto.RegisterType((*Wrapper)(nil), "openapiextension.v1.Wrapper")
|
||||
}
|
||||
|
||||
func init() { proto.RegisterFile("extension.proto", fileDescriptor0) }
|
||||
func init() { proto.RegisterFile("extensions/extension.proto", fileDescriptor_661e47e790f76671) }
|
||||
|
||||
var fileDescriptor0 = []byte{
|
||||
// 357 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0x74, 0x91, 0x4d, 0x4b, 0xc3, 0x40,
|
||||
0x18, 0x84, 0x49, 0xbf, 0x62, 0x56, 0x6c, 0x65, 0x2d, 0x1a, 0xc5, 0x43, 0x09, 0x08, 0x45, 0x64,
|
||||
0x4b, 0x15, 0xbc, 0xb7, 0x50, 0xd4, 0x8b, 0x2d, 0x7b, 0xa8, 0x37, 0xcb, 0x36, 0x7d, 0x9b, 0x46,
|
||||
0x92, 0xdd, 0x75, 0xf3, 0x61, 0xfb, 0x57, 0x3c, 0xfa, 0x4b, 0x25, 0xbb, 0x49, 0x3d, 0xa8, 0xb7,
|
||||
0xcc, 0xc3, 0x24, 0xef, 0xcc, 0x04, 0x75, 0x60, 0x9b, 0x02, 0x4f, 0x42, 0xc1, 0x89, 0x54, 0x22,
|
||||
0x15, 0xf8, 0x44, 0x48, 0xe0, 0x4c, 0x86, 0x3f, 0x3c, 0x1f, 0x5e, 0x9c, 0x07, 0x42, 0x04, 0x11,
|
||||
0x0c, 0xb4, 0x65, 0x99, 0xad, 0x07, 0x8c, 0xef, 0x8c, 0xdf, 0xf3, 0x91, 0x3d, 0x07, 0x55, 0x18,
|
||||
0x71, 0x17, 0x35, 0x63, 0xf6, 0x26, 0x94, 0x6b, 0xf5, 0xac, 0x7e, 0x93, 0x1a, 0xa1, 0x69, 0xc8,
|
||||
0x85, 0x72, 0x6b, 0x25, 0x2d, 0x44, 0x41, 0x25, 0x4b, 0xfd, 0x8d, 0x5b, 0x37, 0x54, 0x0b, 0x7c,
|
||||
0x8a, 0x5a, 0x49, 0xb6, 0x5e, 0x87, 0x5b, 0xb7, 0xd1, 0xb3, 0xfa, 0x0e, 0x2d, 0x95, 0xf7, 0x69,
|
||||
0xa1, 0xb3, 0x49, 0x15, 0xe8, 0x91, 0xf1, 0x55, 0x04, 0x8a, 0xc2, 0x7b, 0x06, 0x49, 0x8a, 0xef,
|
||||
0x91, 0xfd, 0xa1, 0x98, 0x94, 0x60, 0xee, 0x1e, 0xde, 0x5e, 0x92, 0x3f, 0x2a, 0x90, 0x17, 0xe3,
|
||||
0xa1, 0x95, 0x19, 0x3f, 0xa0, 0x63, 0x5f, 0xc4, 0x32, 0x8c, 0x40, 0x2d, 0x72, 0xd3, 0x40, 0x87,
|
||||
0xf9, 0xef, 0x03, 0x65, 0x4b, 0xda, 0xa9, 0xde, 0x2a, 0x81, 0x97, 0x23, 0xf7, 0x77, 0xb6, 0x44,
|
||||
0x0a, 0x9e, 0x00, 0x76, 0x91, 0xbd, 0xd1, 0x68, 0xa5, 0xc3, 0x1d, 0xd0, 0x4a, 0x16, 0x03, 0x80,
|
||||
0x52, 0x7a, 0x96, 0x7a, 0xdf, 0xa1, 0x46, 0xe0, 0x6b, 0xd4, 0xcc, 0x59, 0x94, 0x41, 0x99, 0xa4,
|
||||
0x4b, 0xcc, 0xf0, 0xa4, 0x1a, 0x9e, 0x8c, 0xf8, 0x8e, 0x1a, 0x8b, 0xf7, 0x8a, 0xec, 0xb2, 0x54,
|
||||
0x71, 0xa6, 0xaa, 0x60, 0xe9, 0xe1, 0x2a, 0x89, 0xaf, 0x50, 0x7b, 0xdf, 0x62, 0xc1, 0x59, 0x0c,
|
||||
0xfa, 0x37, 0x38, 0xf4, 0x68, 0x4f, 0x9f, 0x59, 0x0c, 0x18, 0xa3, 0xc6, 0x8e, 0xc5, 0x91, 0x3e,
|
||||
0xeb, 0x50, 0xfd, 0x3c, 0xbe, 0x41, 0x6d, 0xa1, 0x02, 0x12, 0x70, 0x91, 0xa4, 0xa1, 0x4f, 0xf2,
|
||||
0xe1, 0x18, 0x4f, 0x25, 0xf0, 0xd1, 0xec, 0x69, 0x5f, 0x77, 0x3e, 0x9c, 0x59, 0x5f, 0xb5, 0xfa,
|
||||
0x74, 0x34, 0x59, 0xb6, 0x74, 0xc4, 0xbb, 0xef, 0x00, 0x00, 0x00, 0xff, 0xff, 0x84, 0x5c, 0x6b,
|
||||
0x80, 0x51, 0x02, 0x00, 0x00,
|
||||
var fileDescriptor_661e47e790f76671 = []byte{
|
||||
// 360 bytes of a gzipped FileDescriptorProto
|
||||
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x09, 0x6e, 0x88, 0x02, 0xff, 0x74, 0x91, 0xdf, 0x4b, 0xeb, 0x30,
|
||||
0x1c, 0xc5, 0xe9, 0x7e, 0xf5, 0xee, 0x7b, 0xb9, 0xbb, 0x12, 0x87, 0xd6, 0xe1, 0x83, 0x14, 0x04,
|
||||
0x11, 0xe9, 0x98, 0x82, 0xef, 0x1b, 0x0c, 0xf5, 0xc5, 0x8d, 0x3c, 0xcc, 0x37, 0x47, 0xd6, 0x65,
|
||||
0x5d, 0xa5, 0x4d, 0x62, 0xfa, 0xc3, 0xed, 0x5f, 0xf1, 0xd1, 0xbf, 0xd4, 0x34, 0x69, 0xeb, 0x83,
|
||||
0xfa, 0x96, 0xf3, 0xe1, 0x34, 0x39, 0xe7, 0x14, 0x06, 0x74, 0x97, 0x52, 0x96, 0x84, 0x9c, 0x25,
|
||||
0xc3, 0xfa, 0xe8, 0x09, 0xc9, 0x53, 0x8e, 0x0e, 0xb9, 0xa0, 0x8c, 0x88, 0xf0, 0x8b, 0xe7, 0xa3,
|
||||
0xc1, 0x49, 0xc0, 0x79, 0x10, 0xd1, 0xa1, 0xb6, 0xac, 0xb2, 0xcd, 0x90, 0xb0, 0xbd, 0xf1, 0xbb,
|
||||
0x3e, 0xd8, 0x0b, 0x2a, 0x0b, 0x23, 0xea, 0x43, 0x3b, 0x26, 0x2f, 0x5c, 0x3a, 0xd6, 0x99, 0x75,
|
||||
0xd1, 0xc6, 0x46, 0x68, 0x1a, 0x32, 0x45, 0x1b, 0x25, 0x2d, 0x44, 0x41, 0x05, 0x49, 0xfd, 0xad,
|
||||
0xd3, 0x34, 0x54, 0x0b, 0x74, 0x04, 0x9d, 0x24, 0xdb, 0x6c, 0xc2, 0x9d, 0xd3, 0x52, 0xb8, 0x8b,
|
||||
0x4b, 0xe5, 0xbe, 0x5b, 0x70, 0x3c, 0xad, 0x02, 0xdd, 0x13, 0xb6, 0x8e, 0xa8, 0xc4, 0xf4, 0x35,
|
||||
0xa3, 0x49, 0x8a, 0x6e, 0xc1, 0x7e, 0x93, 0x44, 0x08, 0x6a, 0xde, 0xfd, 0x7b, 0x7d, 0xea, 0xfd,
|
||||
0x50, 0xc1, 0x7b, 0x32, 0x1e, 0x5c, 0x99, 0xd1, 0x1d, 0x1c, 0xf8, 0x3c, 0x16, 0xa1, 0xba, 0x6a,
|
||||
0x99, 0x9b, 0x06, 0x3a, 0xcc, 0x6f, 0x17, 0x94, 0x2d, 0xf1, 0xff, 0xea, 0xab, 0x12, 0xb8, 0x39,
|
||||
0x38, 0xdf, 0xb3, 0x25, 0x42, 0x8d, 0x4b, 0x91, 0x03, 0xf6, 0x56, 0xa3, 0xb5, 0x0e, 0xf7, 0x07,
|
||||
0x57, 0xb2, 0x18, 0x80, 0x4a, 0xa9, 0x67, 0x69, 0xaa, 0xa6, 0x46, 0xa0, 0x4b, 0x68, 0xe7, 0x24,
|
||||
0xca, 0x68, 0x99, 0xa4, 0xef, 0x99, 0xe1, 0xbd, 0x6a, 0x78, 0x6f, 0xcc, 0xf6, 0xd8, 0x58, 0xdc,
|
||||
0x67, 0xb0, 0xcb, 0x52, 0xc5, 0x33, 0x55, 0x05, 0x4b, 0x0f, 0x57, 0x49, 0x74, 0x0e, 0xbd, 0xba,
|
||||
0xc5, 0x92, 0x91, 0x98, 0xea, 0xdf, 0xd0, 0xc5, 0xff, 0x6a, 0xfa, 0xa8, 0x20, 0x42, 0xd0, 0xda,
|
||||
0x93, 0x38, 0xd2, 0xcf, 0x76, 0xb1, 0x3e, 0x4f, 0xae, 0xa0, 0xc7, 0x65, 0xe0, 0x05, 0x8c, 0x27,
|
||||
0x69, 0xe8, 0xab, 0x09, 0x26, 0x68, 0xa6, 0x76, 0x19, 0xcf, 0x1f, 0xea, 0xba, 0x8b, 0xd1, 0xdc,
|
||||
0xfa, 0x68, 0x34, 0x67, 0xe3, 0xe9, 0xaa, 0xa3, 0x23, 0xde, 0x7c, 0x06, 0x00, 0x00, 0xff, 0xff,
|
||||
0xeb, 0xf3, 0xfa, 0x65, 0x5c, 0x02, 0x00, 0x00,
|
||||
}
|
||||
|
|
|
|||
|
|
@ -1,19 +1,18 @@
|
|||
sudo: false
|
||||
language: go
|
||||
go:
|
||||
- 1.6.x
|
||||
- 1.7.x
|
||||
- 1.8.x
|
||||
- 1.9.x
|
||||
- master
|
||||
matrix:
|
||||
allow_failures:
|
||||
- go: master
|
||||
fast_finish: true
|
||||
include:
|
||||
- go: 1.10.x
|
||||
- go: 1.11.x
|
||||
env: GOFMT=1
|
||||
- go: master
|
||||
install:
|
||||
- # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step).
|
||||
script:
|
||||
- go get -t -v ./...
|
||||
- diff -u <(echo -n) <(gofmt -d .)
|
||||
- if test -n "${GOFMT}"; then gofmt -w -s . && git diff --exit-code; fi
|
||||
- go tool vet .
|
||||
- go test -v -race ./...
|
||||
|
|
|
|||
|
|
@ -416,14 +416,14 @@ func canStaleOnError(respHeaders, reqHeaders http.Header) bool {
|
|||
func getEndToEndHeaders(respHeaders http.Header) []string {
|
||||
// These headers are always hop-by-hop
|
||||
hopByHopHeaders := map[string]struct{}{
|
||||
"Connection": struct{}{},
|
||||
"Keep-Alive": struct{}{},
|
||||
"Proxy-Authenticate": struct{}{},
|
||||
"Proxy-Authorization": struct{}{},
|
||||
"Te": struct{}{},
|
||||
"Trailers": struct{}{},
|
||||
"Transfer-Encoding": struct{}{},
|
||||
"Upgrade": struct{}{},
|
||||
"Connection": {},
|
||||
"Keep-Alive": {},
|
||||
"Proxy-Authenticate": {},
|
||||
"Proxy-Authorization": {},
|
||||
"Te": {},
|
||||
"Trailers": {},
|
||||
"Transfer-Encoding": {},
|
||||
"Upgrade": {},
|
||||
}
|
||||
|
||||
for _, extra := range strings.Split(respHeaders.Get("connection"), ",") {
|
||||
|
|
@ -433,7 +433,7 @@ func getEndToEndHeaders(respHeaders http.Header) []string {
|
|||
}
|
||||
}
|
||||
endToEndHeaders := []string{}
|
||||
for respHeader, _ := range respHeaders {
|
||||
for respHeader := range respHeaders {
|
||||
if _, ok := hopByHopHeaders[respHeader]; !ok {
|
||||
endToEndHeaders = append(endToEndHeaders, respHeader)
|
||||
}
|
||||
|
|
|
|||
|
|
@ -37,7 +37,7 @@ func (c *Cache) Purge() {
|
|||
c.lock.Unlock()
|
||||
}
|
||||
|
||||
// Add adds a value to the cache. Returns true if an eviction occurred.
|
||||
// Add adds a value to the cache. Returns true if an eviction occurred.
|
||||
func (c *Cache) Add(key, value interface{}) (evicted bool) {
|
||||
c.lock.Lock()
|
||||
evicted = c.lru.Add(key, value)
|
||||
|
|
@ -71,8 +71,8 @@ func (c *Cache) Peek(key interface{}) (value interface{}, ok bool) {
|
|||
return value, ok
|
||||
}
|
||||
|
||||
// ContainsOrAdd checks if a key is in the cache without updating the
|
||||
// recent-ness or deleting it for being stale, and if not, adds the value.
|
||||
// ContainsOrAdd checks if a key is in the cache without updating the
|
||||
// recent-ness or deleting it for being stale, and if not, adds the value.
|
||||
// Returns whether found and whether an eviction occurred.
|
||||
func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool) {
|
||||
c.lock.Lock()
|
||||
|
|
@ -85,6 +85,22 @@ func (c *Cache) ContainsOrAdd(key, value interface{}) (ok, evicted bool) {
return false, evicted
}

// PeekOrAdd checks if a key is in the cache without updating the
// recent-ness or deleting it for being stale, and if not, adds the value.
// Returns whether found and whether an eviction occurred.
func (c *Cache) PeekOrAdd(key, value interface{}) (previous interface{}, ok, evicted bool) {
c.lock.Lock()
defer c.lock.Unlock()

previous, ok = c.lru.Peek(key)
if ok {
return previous, true, false
}

evicted = c.lru.Add(key, value)
return nil, false, evicted
}

// Remove removes the provided key from the cache.
func (c *Cache) Remove(key interface{}) (present bool) {
c.lock.Lock()
|
|
|
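The added `PeekOrAdd` combines a non-promoting lookup and a conditional insert under a single lock acquisition. A short usage sketch, assuming the usual `github.com/hashicorp/golang-lru` import path:

```go
package main

import (
	"fmt"
	"log"

	lru "github.com/hashicorp/golang-lru"
)

func main() {
	cache, err := lru.New(128)
	if err != nil {
		log.Fatal(err)
	}

	// Key is absent: the supplied value is added, nothing is returned as "previous".
	prev, ok, evicted := cache.PeekOrAdd("a", 1)
	fmt.Println(prev, ok, evicted) // <nil> false false

	// Key is present: the existing value is returned without being promoted
	// in the LRU order, and the supplied value is NOT stored.
	prev, ok, evicted = cache.PeekOrAdd("a", 2)
	fmt.Println(prev, ok, evicted) // 1 true false
}
```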
|||
|
|
@ -4,4 +4,6 @@ install:
|
|||
- go get golang.org/x/tools/cmd/cover
|
||||
- go get github.com/mattn/goveralls
|
||||
script:
|
||||
- go test -race -v ./...
|
||||
after_script:
|
||||
- $HOME/gopath/bin/goveralls -service=travis-ci -repotoken $COVERALLS_TOKEN
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@ It is ready for production use. [It is used in several projects by Docker, Googl
|
|||
[![Build Status][1]][2]
|
||||
[![Coverage Status][7]][8]
|
||||
[![Sourcegraph][9]][10]
|
||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_shield)
|
||||
|
||||
[1]: https://travis-ci.org/imdario/mergo.png
|
||||
[2]: https://travis-ci.org/imdario/mergo
|
||||
|
|
@ -27,7 +28,7 @@ It is ready for production use. [It is used in several projects by Docker, Googl
|
|||
|
||||
### Latest release
|
||||
|
||||
[Release v0.3.4](https://github.com/imdario/mergo/releases/tag/v0.3.4).
|
||||
[Release v0.3.7](https://github.com/imdario/mergo/releases/tag/v0.3.7).
|
||||
|
||||
### Important note
|
||||
|
||||
|
|
@ -217,6 +218,21 @@ If I can help you, you have an idea or you are using Mergo in your projects, don
|
|||
|
||||
Written by [Dario Castañé](http://dario.im).
|
||||
|
||||
## Top Contributors
|
||||
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/0)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/1)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/2)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/3)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/4)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/5)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/6)
|
||||
[](https://sourcerer.io/fame/imdario/imdario/mergo/links/7)
|
||||
|
||||
|
||||
## License
|
||||
|
||||
[BSD 3-Clause](http://opensource.org/licenses/BSD-3-Clause) license, as [Go language](http://golang.org/LICENSE).
|
||||
|
||||
|
||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fimdario%2Fmergo?ref=badge_large)
|
||||
|
|
|
|||
|
|
@ -72,6 +72,7 @@ func deepMap(dst, src reflect.Value, visited map[uintptr]*visit, depth int, conf
|
|||
case reflect.Struct:
|
||||
srcMap := src.Interface().(map[string]interface{})
|
||||
for key := range srcMap {
|
||||
config.overwriteWithEmptyValue = true
|
||||
srcValue := srcMap[key]
|
||||
fieldName := changeInitialCase(key, unicode.ToUpper)
|
||||
dstElement := dst.FieldByName(fieldName)
|
||||
|
|
|
|||
|
|
@ -9,6 +9,7 @@
|
|||
package mergo
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"reflect"
|
||||
)
|
||||
|
||||
|
|
@ -25,9 +26,12 @@ func hasExportedField(dst reflect.Value) (exported bool) {
|
|||
}
|
||||
|
||||
type Config struct {
|
||||
Overwrite bool
|
||||
AppendSlice bool
|
||||
Transformers Transformers
|
||||
Overwrite bool
|
||||
AppendSlice bool
|
||||
TypeCheck bool
|
||||
Transformers Transformers
|
||||
overwriteWithEmptyValue bool
|
||||
overwriteSliceWithEmptyValue bool
|
||||
}
|
||||
|
||||
type Transformers interface {
|
||||
|
|
@ -39,6 +43,10 @@ type Transformers interface {
|
|||
// short circuiting on recursive types.
|
||||
func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, config *Config) (err error) {
|
||||
overwrite := config.Overwrite
|
||||
typeCheck := config.TypeCheck
|
||||
overwriteWithEmptySrc := config.overwriteWithEmptyValue
|
||||
overwriteSliceWithEmptySrc := config.overwriteSliceWithEmptyValue
|
||||
config.overwriteWithEmptyValue = false
|
||||
|
||||
if !src.IsValid() {
|
||||
return
|
||||
|
|
@ -73,7 +81,7 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
|
|||
}
|
||||
}
|
||||
} else {
|
||||
if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) {
|
||||
if dst.CanSet() && (!isEmptyValue(src) || overwriteWithEmptySrc) && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
|
|
@ -124,19 +132,25 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
|
|||
dstSlice = reflect.ValueOf(dstElement.Interface())
|
||||
}
|
||||
|
||||
if !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice {
|
||||
if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice {
|
||||
if typeCheck && srcSlice.Type() != dstSlice.Type() {
|
||||
return fmt.Errorf("cannot override two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
|
||||
}
|
||||
dstSlice = srcSlice
|
||||
} else if config.AppendSlice {
|
||||
if srcSlice.Type() != dstSlice.Type() {
|
||||
return fmt.Errorf("cannot append two slices with different type (%s, %s)", srcSlice.Type(), dstSlice.Type())
|
||||
}
|
||||
dstSlice = reflect.AppendSlice(dstSlice, srcSlice)
|
||||
}
|
||||
dst.SetMapIndex(key, dstSlice)
|
||||
}
|
||||
}
|
||||
if dstElement.IsValid() && reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map {
|
||||
if dstElement.IsValid() && !isEmptyValue(dstElement) && (reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Map || reflect.TypeOf(srcElement.Interface()).Kind() == reflect.Slice) {
|
||||
continue
|
||||
}
|
||||
|
||||
if srcElement.IsValid() && (overwrite || (!dstElement.IsValid() || isEmptyValue(dstElement))) {
|
||||
if srcElement.IsValid() && ((srcElement.Kind() != reflect.Ptr && overwrite) || !dstElement.IsValid() || isEmptyValue(dstElement)) {
|
||||
if dst.IsNil() {
|
||||
dst.Set(reflect.MakeMap(dst.Type()))
|
||||
}
|
||||
|
|
@ -147,9 +161,12 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
|
|||
if !dst.CanSet() {
|
||||
break
|
||||
}
|
||||
if !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice {
|
||||
if (!isEmptyValue(src) || overwriteWithEmptySrc || overwriteSliceWithEmptySrc) && (overwrite || isEmptyValue(dst)) && !config.AppendSlice {
|
||||
dst.Set(src)
|
||||
} else if config.AppendSlice {
|
||||
if src.Type() != dst.Type() {
|
||||
return fmt.Errorf("cannot append two slice with different type (%s, %s)", src.Type(), dst.Type())
|
||||
}
|
||||
dst.Set(reflect.AppendSlice(dst, src))
|
||||
}
|
||||
case reflect.Ptr:
|
||||
|
|
@ -158,11 +175,21 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
|
|||
if src.IsNil() {
|
||||
break
|
||||
}
|
||||
if src.Kind() != reflect.Interface {
|
||||
|
||||
if dst.Kind() != reflect.Ptr && src.Type().AssignableTo(dst.Type()) {
|
||||
if dst.IsNil() || overwrite {
|
||||
if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
break
|
||||
}
|
||||
|
||||
if src.Kind() != reflect.Interface {
|
||||
if dst.IsNil() || (src.Kind() != reflect.Ptr && overwrite) {
|
||||
if dst.CanSet() && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
} else if src.Kind() == reflect.Ptr {
|
||||
if err = deepMerge(dst.Elem(), src.Elem(), visited, depth+1, config); err != nil {
|
||||
return
|
||||
|
|
@ -184,10 +211,11 @@ func deepMerge(dst, src reflect.Value, visited map[uintptr]*visit, depth int, co
|
|||
return
|
||||
}
|
||||
default:
|
||||
if dst.CanSet() && !isEmptyValue(src) && (overwrite || isEmptyValue(dst)) {
|
||||
if dst.CanSet() && (!isEmptyValue(src) || overwriteWithEmptySrc) && (overwrite || isEmptyValue(dst)) {
|
||||
dst.Set(src)
|
||||
}
|
||||
}
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
|
|
@ -199,7 +227,7 @@ func Merge(dst, src interface{}, opts ...func(*Config)) error {
|
|||
return merge(dst, src, opts...)
|
||||
}
|
||||
|
||||
// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overriden by
|
||||
// MergeWithOverwrite will do the same as Merge except that non-empty dst attributes will be overridden by
|
||||
// non-empty src attribute values.
|
||||
// Deprecated: use Merge(…) with WithOverride
|
||||
func MergeWithOverwrite(dst, src interface{}, opts ...func(*Config)) error {
|
||||
|
|
@ -218,11 +246,21 @@ func WithOverride(config *Config) {
|
|||
config.Overwrite = true
|
||||
}
|
||||
|
||||
// WithAppendSlice will make merge append slices instead of overwriting it
|
||||
// WithOverrideEmptySlice will make merge override empty dst slice with empty src slice.
|
||||
func WithOverrideEmptySlice(config *Config) {
|
||||
config.overwriteSliceWithEmptyValue = true
|
||||
}
|
||||
|
||||
// WithAppendSlice will make merge append slices instead of overwriting it.
|
||||
func WithAppendSlice(config *Config) {
|
||||
config.AppendSlice = true
|
||||
}
|
||||
|
||||
// WithTypeCheck will make merge check types while overwriting it (must be used with WithOverride).
|
||||
func WithTypeCheck(config *Config) {
|
||||
config.TypeCheck = true
|
||||
}
|
||||
|
||||
func merge(dst, src interface{}, opts ...func(*Config)) error {
|
||||
var (
|
||||
vDst, vSrc reflect.Value
|
||||
|
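A small sketch of how the exported mergo options, including the new `WithTypeCheck`, compose on a call to `mergo.Merge`; the `config` type and values are illustrative only:

```go
package main

import (
	"fmt"
	"log"

	"github.com/imdario/mergo"
)

type config struct {
	Host string
	Tags []string
}

func main() {
	dst := config{Host: "localhost", Tags: []string{"a"}}
	src := config{Host: "example.org", Tags: []string{"b"}}

	// WithOverride lets non-empty src fields replace non-empty dst fields,
	// WithAppendSlice appends slices instead of replacing them, and
	// WithTypeCheck turns mismatched slice types into an error instead of a silent overwrite.
	err := mergo.Merge(&dst, src, mergo.WithOverride, mergo.WithAppendSlice, mergo.WithTypeCheck)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", dst) // {Host:example.org Tags:[a b]}
}
```

Per the doc comment above, `WithTypeCheck` is meant to be combined with `WithOverride`.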
|
|
|||
|
|
@ -341,7 +341,7 @@ func describeStruct(ctx *ctx, typ reflect2.Type) *StructDescriptor {
|
|||
if ctx.onlyTaggedField && !hastag && !field.Anonymous() {
|
||||
continue
|
||||
}
|
||||
if tag == "-" {
|
||||
if tag == "-" || field.Name() == "_" {
|
||||
continue
|
||||
}
|
||||
tagParts := strings.Split(tag, ",")
|
||||
|
|
|
|||
|
|
@ -290,16 +290,17 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
|||
stream.WriteObjectStart()
|
||||
mapIter := encoder.mapType.UnsafeIterate(ptr)
|
||||
subStream := stream.cfg.BorrowStream(nil)
|
||||
subStream.Attachment = stream.Attachment
|
||||
subIter := stream.cfg.BorrowIterator(nil)
|
||||
keyValues := encodedKeyValues{}
|
||||
for mapIter.HasNext() {
|
||||
subStream.buf = make([]byte, 0, 64)
|
||||
key, elem := mapIter.UnsafeNext()
|
||||
subStreamIndex := subStream.Buffered()
|
||||
encoder.keyEncoder.Encode(key, subStream)
|
||||
if subStream.Error != nil && subStream.Error != io.EOF && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
encodedKey := subStream.Buffer()
|
||||
encodedKey := subStream.Buffer()[subStreamIndex:]
|
||||
subIter.ResetBytes(encodedKey)
|
||||
decodedKey := subIter.ReadString()
|
||||
if stream.indention > 0 {
|
||||
|
|
@ -310,7 +311,7 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
|||
encoder.elemEncoder.Encode(elem, subStream)
|
||||
keyValues = append(keyValues, encodedKV{
|
||||
key: decodedKey,
|
||||
keyValue: subStream.Buffer(),
|
||||
keyValue: subStream.Buffer()[subStreamIndex:],
|
||||
})
|
||||
}
|
||||
sort.Sort(keyValues)
|
||||
|
|
@ -320,6 +321,9 @@ func (encoder *sortKeysMapEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
|||
}
|
||||
stream.Write(keyValue.keyValue)
|
||||
}
|
||||
if subStream.Error != nil && stream.Error == nil {
|
||||
stream.Error = subStream.Error
|
||||
}
|
||||
stream.WriteObjectEnd()
|
||||
stream.cfg.ReturnStream(subStream)
|
||||
stream.cfg.ReturnIterator(subIter)
|
||||
|
|
|
|||
|
|
@ -200,6 +200,7 @@ type stringModeStringEncoder struct {
|
|||
|
||||
func (encoder *stringModeStringEncoder) Encode(ptr unsafe.Pointer, stream *Stream) {
|
||||
tempStream := encoder.cfg.BorrowStream(nil)
|
||||
tempStream.Attachment = stream.Attachment
|
||||
defer encoder.cfg.ReturnStream(tempStream)
|
||||
encoder.elemEncoder.Encode(ptr, tempStream)
|
||||
stream.WriteString(string(tempStream.Buffer()))
|
||||
|
|
|
|||
|
|
@ -7,4 +7,6 @@ go:
|
|||
- 1.8.x
|
||||
- 1.9.x
|
||||
- "1.10.x"
|
||||
- "1.11.x"
|
||||
- "1.12.x"
|
||||
- tip
|
||||
|
|
|
|||
|
|
@ -1,5 +1,13 @@
## Changelog

### [1.8.1](https://github.com/magiconair/properties/tree/v1.8.1) - 10 May 2019

* [PR #35](https://github.com/magiconair/properties/pull/35): Close body always after request

  This patch ensures that in `LoadURL` the response body is always closed.

  Thanks to [@liubog2008](https://github.com/liubog2008) for the patch.
|
||||
|
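Not the library's code: a stripped-down sketch of the pattern this 1.8.1 fix applies, registering `Close` immediately after the error check so that every later return path releases the body:

```go
package main

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

func fetch(url string) ([]byte, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, err
	}
	defer resp.Body.Close() // runs on every return path below

	if resp.StatusCode == http.StatusNotFound {
		return nil, fmt.Errorf("fetch %s: %s", url, resp.Status)
	}
	return ioutil.ReadAll(resp.Body)
}

func main() {
	if _, err := fetch("https://example.com/app.properties"); err != nil {
		fmt.Println(err)
	}
}
```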
||||
### [1.8](https://github.com/magiconair/properties/tree/v1.8) - 15 May 2018
|
||||
|
||||
* [PR #26](https://github.com/magiconair/properties/pull/26): Disable expansion during loading
|
||||
|
|
|
|||
|
|
@ -1,6 +1,6 @@
|
|||
[](https://github.com/magiconair/properties/releases)
|
||||
[](https://travis-ci.org/magiconair/properties)
|
||||
[](https://app.codeship.com/projects/274177")
|
||||
[](https://circleci.com/gh/magiconair/properties)
|
||||
[](https://raw.githubusercontent.com/magiconair/properties/master/LICENSE)
|
||||
[](http://godoc.org/github.com/magiconair/properties)
|
||||
|
||||
|
|
@ -30,7 +30,7 @@ changed from `panic` to `log.Fatal` but this is configurable and custom
|
|||
error handling functions can be provided. See the package documentation for
|
||||
details.
|
||||
|
||||
Read the full documentation on [GoDoc](https://godoc.org/github.com/magiconair/properties) [](https://godoc.org/github.com/magiconair/properties)
|
||||
Read the full documentation on [](http://godoc.org/github.com/magiconair/properties)
|
||||
|
||||
## Getting Started
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1 @@
|
|||
module github.com/magiconair/properties
|
||||
|
|
@ -115,6 +115,7 @@ func (l *Loader) LoadURL(url string) (*Properties, error) {
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("properties: error fetching %q. %s", url, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
if resp.StatusCode == 404 && l.IgnoreMissing {
|
||||
LogPrintf("properties: %s returned %d. skipping", url, resp.StatusCode)
|
||||
|
|
@ -129,7 +130,6 @@ func (l *Loader) LoadURL(url string) (*Properties, error) {
|
|||
if err != nil {
|
||||
return nil, fmt.Errorf("properties: %s error reading response. %s", url, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
|
||||
ct := resp.Header.Get("Content-Type")
|
||||
var enc Encoding
|
||||
|
|
|
|||
|
|
@ -270,16 +270,25 @@ func (w *Writer) Bool(v bool) {
|
|||
|
||||
const chars = "0123456789abcdef"
|
||||
|
||||
func isNotEscapedSingleChar(c byte, escapeHTML bool) bool {
|
||||
// Note: might make sense to use a table if there are more chars to escape. With 4 chars
|
||||
// it benchmarks the same.
|
||||
if escapeHTML {
|
||||
return c != '<' && c != '>' && c != '&' && c != '\\' && c != '"' && c >= 0x20 && c < utf8.RuneSelf
|
||||
} else {
|
||||
return c != '\\' && c != '"' && c >= 0x20 && c < utf8.RuneSelf
|
||||
func getTable(falseValues ...int) [128]bool {
|
||||
table := [128]bool{}
|
||||
|
||||
for i := 0; i < 128; i++ {
|
||||
table[i] = true
|
||||
}
|
||||
|
||||
for _, v := range falseValues {
|
||||
table[v] = false
|
||||
}
|
||||
|
||||
return table
|
||||
}
|
||||
|
||||
var (
|
||||
htmlEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '&', '<', '>', '\\')
|
||||
htmlNoEscapeTable = getTable(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, '"', '\\')
|
||||
)
|
||||
|
||||
func (w *Writer) String(s string) {
|
||||
w.Buffer.AppendByte('"')
|
||||
|
||||
|
|
@ -288,15 +297,23 @@ func (w *Writer) String(s string) {
|
|||
|
||||
p := 0 // last non-escape symbol
|
||||
|
||||
var escapeTable [128]bool
|
||||
if w.NoEscapeHTML {
|
||||
escapeTable = htmlNoEscapeTable
|
||||
} else {
|
||||
escapeTable = htmlEscapeTable
|
||||
}
|
||||
|
||||
for i := 0; i < len(s); {
|
||||
c := s[i]
|
||||
|
||||
if isNotEscapedSingleChar(c, !w.NoEscapeHTML) {
|
||||
// single-width character, no escaping is required
|
||||
i++
|
||||
continue
|
||||
} else if c < utf8.RuneSelf {
|
||||
// single-with character, need to escape
|
||||
if c < utf8.RuneSelf {
|
||||
if escapeTable[c] {
|
||||
// single-width character, no escaping is required
|
||||
i++
|
||||
continue
|
||||
}
|
||||
|
||||
w.Buffer.AppendString(s[p:i])
|
||||
switch c {
|
||||
case '\t':
|
||||
|
|
|
|||
|
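The jwriter change above replaces a per-character predicate with a precomputed `[128]bool` lookup table, so the hot loop branches on a single array index per byte. A self-contained sketch of that technique (not easyjson's actual code):

```go
package main

import "fmt"

// buildTable marks every single-byte character as safe to copy verbatim,
// then clears the entries that must be escaped.
func buildTable(unsafe ...int) [128]bool {
	var t [128]bool
	for i := range t {
		t[i] = true
	}
	for i := 0; i < 0x20; i++ { // control characters always need escaping
		t[i] = false
	}
	for _, c := range unsafe {
		t[c] = false
	}
	return t
}

var noEscape = buildTable('"', '\\', '<', '>', '&')

// needsEscaping scans a string with one table lookup per byte.
func needsEscaping(s string) bool {
	for i := 0; i < len(s); i++ {
		if s[i] >= 0x80 || !noEscape[s[i]] {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(needsEscaping("plain text")) // false
	fmt.Println(needsEscaping(`say "hi"`))   // true
}
```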
|
@ -0,0 +1,2 @@
|
|||
cmd/tomll/tomll
|
||||
cmd/tomljson/tomljson
|
||||
|
|
@ -1,2 +1,5 @@
|
|||
test_program/test_program_bin
|
||||
fuzz/
|
||||
cmd/tomll/tomll
|
||||
cmd/tomljson/tomljson
|
||||
cmd/tomltestgen/tomltestgen
|
||||
|
|
|
|||
|
|
@ -1,23 +0,0 @@
|
|||
sudo: false
|
||||
language: go
|
||||
go:
|
||||
- 1.8.x
|
||||
- 1.9.x
|
||||
- 1.10.x
|
||||
- tip
|
||||
matrix:
|
||||
allow_failures:
|
||||
- go: tip
|
||||
fast_finish: true
|
||||
script:
|
||||
- if [ -n "$(go fmt ./...)" ]; then exit 1; fi
|
||||
- ./test.sh
|
||||
- ./benchmark.sh $TRAVIS_BRANCH https://github.com/$TRAVIS_REPO_SLUG.git
|
||||
before_install:
|
||||
- go get github.com/axw/gocov/gocov
|
||||
- go get github.com/mattn/goveralls
|
||||
- if ! go get code.google.com/p/go.tools/cmd/cover; then go get golang.org/x/tools/cmd/cover; fi
|
||||
branches:
|
||||
only: [master]
|
||||
after_success:
|
||||
- $HOME/gopath/bin/goveralls -service=travis-ci -coverprofile=coverage.out -repotoken $COVERALLS_TOKEN
|
||||
|
|
@ -0,0 +1,132 @@
|
|||
## Contributing
|
||||
|
||||
Thank you for your interest in go-toml! We appreciate you considering
|
||||
contributing to go-toml!
|
||||
|
||||
The main goal of the project is to provide an easy-to-use TOML
implementation for Go that gets the job done and gets out of your way –
dealing with TOML is probably not the central piece of your project.
|
||||
|
||||
As the single maintainer of go-toml, time is scarce. All help, big or
|
||||
small, is more than welcomed!
|
||||
|
||||
### Ask questions
|
||||
|
||||
Any question you may have, somebody else might have it too. Always feel
|
||||
free to ask them on the [issues tracker][issues-tracker]. We will try to
|
||||
answer them as clearly and quickly as possible, time permitting.
|
||||
|
||||
Asking questions also helps us identify areas where the documentation needs
|
||||
improvement, or new features that weren't envisioned before. Sometimes, a
|
||||
seemingly innocent question leads to the fix of a bug. Don't hesitate and
|
||||
ask away!
|
||||
|
||||
### Improve the documentation
|
||||
|
||||
The best way to share your knowledge and experience with go-toml is to
|
||||
improve the documentation. Fix a typo, clarify an interface, add an
|
||||
example, anything goes!
|
||||
|
||||
The documentation is present in the [README][readme] and throughout the
source code. On release, it gets updated on [GoDoc][godoc]. To make a
|
||||
change to the documentation, create a pull request with your proposed
|
||||
changes. For simple changes like that, the easiest way to go is probably
|
||||
the "Fork this project and edit the file" button on Github, displayed at
|
||||
the top right of the file. Unless it's a trivial change (for example a
|
||||
typo), provide a little bit of context in your pull request description or
|
||||
commit message.
|
||||
|
||||
### Report a bug
|
||||
|
||||
Found a bug! Sorry to hear that :(. Help us and others track them down and
fix them by reporting it. [File a new bug report][bug-report] on the [issues
|
||||
tracker][issues-tracker]. The template should provide enough guidance on
|
||||
what to include. When in doubt: add more details! By reducing ambiguity and
|
||||
providing more information, it decreases back and forth and saves everyone
|
||||
time.
|
||||
|
||||
### Code changes
|
||||
|
||||
Want to contribute a patch? Very happy to hear that!
|
||||
|
||||
First, some high-level rules:
|
||||
|
||||
* A short proposal with some POC code is better than a lengthy piece of
|
||||
text with no code. Code speaks louder than words.
|
||||
* No backward-incompatible patch will be accepted unless discussed.
|
||||
Sometimes it's hard, and Go's lack of versioning by default does not
|
||||
help, but we try not to break people's programs unless we absolutely have
|
||||
to.
|
||||
* If you are writing a new feature or extending an existing one, make sure
|
||||
to write some documentation.
|
||||
* Bug fixes need to be accompanied with regression tests.
|
||||
* New code needs to be tested.
|
||||
* Your commit messages need to explain why the change is needed, even if
|
||||
already included in the PR description.
|
||||
|
||||
It does sound like a lot, but those best practices are here to save time
|
||||
overall and continuously improve the quality of the project, which is
|
||||
something everyone benefits from.
|
||||
|
||||
#### Get started
|
||||
|
||||
The fairly standard code contribution process looks like this:
|
||||
|
||||
1. [Fork the project][fork].
|
||||
2. Make your changes, commit on any branch you like.
|
||||
3. [Open up a pull request][pull-request]
|
||||
4. Review, potentially ask for changes.
|
||||
5. Merge. You're in!
|
||||
|
||||
Feel free to ask for help! You can create draft pull requests to gather
|
||||
some early feedback!
|
||||
|
||||
#### Run the tests
|
||||
|
||||
You can run tests for go-toml using Go's test tool: `go test ./...`.
|
||||
When creating a pull request, all tests will be run on Linux on a few Go
|
||||
versions (Travis CI), and on Windows using the latest Go version
|
||||
(AppVeyor).
|
||||
|
||||
#### Style
|
||||
|
||||
Try to look around and follow the same format and structure as the rest of
|
||||
the code. We enforce using `go fmt` on the whole code base.
|
||||
|
||||
---
|
||||
|
||||
### Maintainers-only
|
||||
|
||||
#### Merge pull request
|
||||
|
||||
Checklist:
|
||||
|
||||
* Passing CI.
|
||||
* Does not introduce backward-incompatible changes (unless discussed).
|
||||
* Has relevant doc changes.
|
||||
* Has relevant unit tests.
|
||||
|
||||
1. Merge using "squash and merge".
|
||||
2. Make sure to edit the commit message to keep all the useful information
|
||||
nice and clean.
|
||||
3. Make sure the commit title is clear and contains the PR number (#123).
|
||||
|
||||
#### New release
|
||||
|
||||
1. Go to [releases][releases]. Click on "X commits to master since this
|
||||
release".
|
||||
2. Make note of all the changes. Look for backward incompatible changes,
|
||||
new features, and bug fixes.
|
||||
3. Pick the new version using the above and semver.
|
||||
4. Create a [new release][new-release].
|
||||
5. Follow the same format as [1.1.0][release-110].
|
||||
|
||||
[issues-tracker]: https://github.com/pelletier/go-toml/issues
|
||||
[bug-report]: https://github.com/pelletier/go-toml/issues/new?template=bug_report.md
|
||||
[godoc]: https://godoc.org/github.com/pelletier/go-toml
|
||||
[readme]: ./README.md
|
||||
[fork]: https://help.github.com/articles/fork-a-repo
|
||||
[pull-request]: https://help.github.com/en/articles/creating-a-pull-request
|
||||
[releases]: https://github.com/pelletier/go-toml/releases
|
||||
[new-release]: https://github.com/pelletier/go-toml/releases/new
|
||||
[release-110]: https://github.com/pelletier/go-toml/releases/tag/v1.1.0
|
||||
|
|
@ -0,0 +1,11 @@
|
|||
FROM golang:1.12-alpine3.9 as builder
|
||||
WORKDIR /go/src/github.com/pelletier/go-toml
|
||||
COPY . .
|
||||
ENV CGO_ENABLED=0
|
||||
ENV GOOS=linux
|
||||
RUN go install ./...
|
||||
|
||||
FROM scratch
|
||||
COPY --from=builder /go/bin/tomll /usr/bin/tomll
|
||||
COPY --from=builder /go/bin/tomljson /usr/bin/tomljson
|
||||
COPY --from=builder /go/bin/jsontoml /usr/bin/jsontoml
|
||||
|
|
@ -0,0 +1,29 @@
|
|||
export CGO_ENABLED=0
|
||||
go := go
|
||||
go.goos ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f1)
|
||||
go.goarch ?= $(shell echo `go version`|cut -f4 -d ' '|cut -d '/' -f2)
|
||||
|
||||
out.tools := tomll tomljson jsontoml
|
||||
out.dist := $(out.tools:=_$(go.goos)_$(go.goarch).tar.xz)
|
||||
sources := $(wildcard **/*.go)
|
||||
|
||||
|
||||
.PHONY:
|
||||
tools: $(out.tools)
|
||||
|
||||
$(out.tools): $(sources)
|
||||
GOOS=$(go.goos) GOARCH=$(go.goarch) $(go) build ./cmd/$@
|
||||
|
||||
.PHONY:
|
||||
dist: $(out.dist)
|
||||
|
||||
$(out.dist):%_$(go.goos)_$(go.goarch).tar.xz: %
|
||||
if [ "$(go.goos)" = "windows" ]; then \
|
||||
tar -cJf $@ $^.exe; \
|
||||
else \
|
||||
tar -cJf $@ $^; \
|
||||
fi
|
||||
|
||||
.PHONY:
|
||||
clean:
|
||||
rm -rf $(out.tools) $(out.dist)
|
||||
|
|
@ -0,0 +1,5 @@
|
|||
**Issue:** add link to pelletier/go-toml issue here
|
||||
|
||||
Explanation of what this pull request does.
|
||||
|
||||
More detailed description of the decisions being made and the reasons why (if the patch is non-trivial).
|
||||
|
|
@ -3,13 +3,14 @@
|
|||
Go library for the [TOML](https://github.com/mojombo/toml) format.
|
||||
|
||||
This library supports TOML version
|
||||
[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md)
|
||||
[v1.0.0-rc.1](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v1.0.0-rc.1.md)
|
||||
|
||||
[](http://godoc.org/github.com/pelletier/go-toml)
|
||||
[](https://github.com/pelletier/go-toml/blob/master/LICENSE)
|
||||
[](https://travis-ci.org/pelletier/go-toml)
|
||||
[](https://coveralls.io/github/pelletier/go-toml?branch=master)
|
||||
[](https://dev.azure.com/pelletierthomas/go-toml-ci/_build/latest?definitionId=1&branchName=master)
|
||||
[](https://codecov.io/gh/pelletier/go-toml)
|
||||
[](https://goreportcard.com/report/github.com/pelletier/go-toml)
|
||||
[](https://app.fossa.io/projects/git%2Bgithub.com%2Fpelletier%2Fgo-toml?ref=badge_shield)
|
||||
|
||||
## Features
|
||||
|
||||
|
|
@ -17,7 +18,7 @@ Go-toml provides the following features for using data parsed from TOML document
|
|||
|
||||
* Load TOML documents from files and string data
|
||||
* Easily navigate TOML structure using Tree
|
||||
* Mashaling and unmarshaling to and from data structures
|
||||
* Marshaling and unmarshaling to and from data structures
|
||||
* Line & column position data for all parsed elements
|
||||
* [Query support similar to JSON-Path](query/)
|
||||
* Syntax errors contain line and column numbers
|
||||
|
|
@ -73,7 +74,7 @@ Or use a query:
|
|||
q, _ := query.Compile("$..[user,password]")
|
||||
results := q.Execute(config)
|
||||
for ii, item := range results.Values() {
|
||||
fmt.Println("Query result %d: %v", ii, item)
|
||||
fmt.Printf("Query result %d: %v\n", ii, item)
|
||||
}
|
||||
```
|
||||
|
||||
|
|
@ -86,7 +87,7 @@ The documentation and additional examples are available at
|
|||
|
||||
Go-toml provides two handy command line tools:
|
||||
|
||||
* `tomll`: Reads TOML files and lint them.
|
||||
* `tomll`: Reads TOML files and lints them.
|
||||
|
||||
```
|
||||
go install github.com/pelletier/go-toml/cmd/tomll
|
||||
|
|
@ -99,6 +100,30 @@ Go-toml provides two handy command line tools:
|
|||
tomljson --help
|
||||
```
|
||||
|
||||
* `jsontoml`: Reads a JSON file and outputs a TOML representation.
|
||||
|
||||
```
|
||||
go install github.com/pelletier/go-toml/cmd/jsontoml
|
||||
jsontoml --help
|
||||
```
|
||||
|
||||
### Docker image
|
||||
|
||||
Those tools are also available as a Docker image from
|
||||
[dockerhub](https://hub.docker.com/r/pelletier/go-toml). For example, to
|
||||
use `tomljson`:
|
||||
|
||||
```
|
||||
docker run -v $PWD:/workdir pelletier/go-toml tomljson /workdir/example.toml
|
||||
```
|
||||
|
||||
Only master (`latest`) and tagged versions are published to dockerhub. You
|
||||
can build your own image as usual:
|
||||
|
||||
```
|
||||
docker build -t go-toml .
|
||||
```
|
||||
|
||||
## Contribute
|
||||
|
||||
Feel free to report bugs and patches using GitHub's pull requests system on
|
||||
|
|
@ -107,12 +132,7 @@ much appreciated!
|
|||
|
||||
### Run tests
|
||||
|
||||
You have to make sure two kind of tests run:
|
||||
|
||||
1. The Go unit tests
|
||||
2. The TOML examples base
|
||||
|
||||
You can run both of them using `./test.sh`.
|
||||
`go test ./...`
|
||||
|
||||
### Fuzzing
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,230 @@
|
|||
trigger:
|
||||
- master
|
||||
|
||||
stages:
|
||||
- stage: fuzzit
|
||||
displayName: "Run Fuzzit"
|
||||
dependsOn: []
|
||||
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||
jobs:
|
||||
- job: submit
|
||||
displayName: "Submit"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.14"
|
||||
inputs:
|
||||
version: "1.14"
|
||||
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||
- script: mkdir -p ${HOME}/go/src/github.com/pelletier/go-toml
|
||||
- script: cp -R . ${HOME}/go/src/github.com/pelletier/go-toml
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
filePath: './fuzzit.sh'
|
||||
env:
|
||||
TYPE: fuzzing
|
||||
FUZZIT_API_KEY: $(FUZZIT_API_KEY)
|
||||
|
||||
- stage: run_checks
|
||||
displayName: "Check"
|
||||
dependsOn: []
|
||||
jobs:
|
||||
- job: fmt
|
||||
displayName: "fmt"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.14"
|
||||
inputs:
|
||||
version: "1.14"
|
||||
- task: Go@0
|
||||
displayName: "go fmt ./..."
|
||||
inputs:
|
||||
command: 'custom'
|
||||
customCommand: 'fmt'
|
||||
arguments: './...'
|
||||
- job: coverage
|
||||
displayName: "coverage"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.14"
|
||||
inputs:
|
||||
version: "1.14"
|
||||
- task: Go@0
|
||||
displayName: "Generate coverage"
|
||||
inputs:
|
||||
command: 'test'
|
||||
arguments: "-race -coverprofile=coverage.txt -covermode=atomic"
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: 'inline'
|
||||
script: 'bash <(curl -s https://codecov.io/bash) -t ${CODECOV_TOKEN}'
|
||||
env:
|
||||
CODECOV_TOKEN: $(CODECOV_TOKEN)
|
||||
- job: benchmark
|
||||
displayName: "benchmark"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.14"
|
||||
inputs:
|
||||
version: "1.14"
|
||||
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
filePath: './benchmark.sh'
|
||||
arguments: "master $(Build.Repository.Uri)"
|
||||
|
||||
- job: fuzzing
|
||||
displayName: "fuzzing"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go 1.14"
|
||||
inputs:
|
||||
version: "1.14"
|
||||
- script: echo "##vso[task.setvariable variable=PATH]${PATH}:/home/vsts/go/bin/"
|
||||
- script: mkdir -p ${HOME}/go/src/github.com/pelletier/go-toml
|
||||
- script: cp -R . ${HOME}/go/src/github.com/pelletier/go-toml
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
filePath: './fuzzit.sh'
|
||||
env:
|
||||
TYPE: local-regression
|
||||
|
||||
- job: go_unit_tests
|
||||
displayName: "unit tests"
|
||||
strategy:
|
||||
matrix:
|
||||
linux 1.14:
|
||||
goVersion: '1.14'
|
||||
imageName: 'ubuntu-latest'
|
||||
mac 1.14:
|
||||
goVersion: '1.14'
|
||||
imageName: 'macOS-latest'
|
||||
windows 1.14:
|
||||
goVersion: '1.14'
|
||||
imageName: 'windows-latest'
|
||||
linux 1.13:
|
||||
goVersion: '1.13'
|
||||
imageName: 'ubuntu-latest'
|
||||
mac 1.13:
|
||||
goVersion: '1.13'
|
||||
imageName: 'macOS-latest'
|
||||
windows 1.13:
|
||||
goVersion: '1.13'
|
||||
imageName: 'windows-latest'
|
||||
pool:
|
||||
vmImage: $(imageName)
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go $(goVersion)"
|
||||
inputs:
|
||||
version: $(goVersion)
|
||||
- task: Go@0
|
||||
displayName: "go test ./..."
|
||||
inputs:
|
||||
command: 'test'
|
||||
arguments: './...'
|
||||
- stage: build_binaries
|
||||
displayName: "Build binaries"
|
||||
dependsOn: run_checks
|
||||
jobs:
|
||||
- job: build_binary
|
||||
displayName: "Build binary"
|
||||
strategy:
|
||||
matrix:
|
||||
linux_amd64:
|
||||
GOOS: linux
|
||||
GOARCH: amd64
|
||||
darwin_amd64:
|
||||
GOOS: darwin
|
||||
GOARCH: amd64
|
||||
windows_amd64:
|
||||
GOOS: windows
|
||||
GOARCH: amd64
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: GoTool@0
|
||||
displayName: "Install Go"
|
||||
inputs:
|
||||
version: 1.14
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: "make dist"
|
||||
env:
|
||||
go.goos: $(GOOS)
|
||||
go.goarch: $(GOARCH)
|
||||
- task: CopyFiles@2
|
||||
inputs:
|
||||
sourceFolder: '$(Build.SourcesDirectory)'
|
||||
contents: '*.tar.xz'
|
||||
TargetFolder: '$(Build.ArtifactStagingDirectory)'
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: binaries
|
||||
- stage: build_binaries_manifest
|
||||
displayName: "Build binaries manifest"
|
||||
dependsOn: build_binaries
|
||||
jobs:
|
||||
- job: build_manifest
|
||||
displayName: "Build binaries manifest"
|
||||
steps:
|
||||
- task: DownloadBuildArtifacts@0
|
||||
inputs:
|
||||
buildType: 'current'
|
||||
downloadType: 'single'
|
||||
artifactName: 'binaries'
|
||||
downloadPath: '$(Build.SourcesDirectory)'
|
||||
- task: Bash@3
|
||||
inputs:
|
||||
targetType: inline
|
||||
script: "cd binaries && sha256sum --binary *.tar.xz | tee $(Build.ArtifactStagingDirectory)/sha256sums.txt"
|
||||
- task: PublishBuildArtifacts@1
|
||||
inputs:
|
||||
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
|
||||
artifactName: manifest
|
||||
|
||||
- stage: build_docker_image
|
||||
displayName: "Build Docker image"
|
||||
dependsOn: run_checks
|
||||
jobs:
|
||||
- job: build
|
||||
displayName: "Build"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
command: 'build'
|
||||
Dockerfile: 'Dockerfile'
|
||||
buildContext: '.'
|
||||
addPipelineData: false
|
||||
|
||||
- stage: publish_docker_image
|
||||
displayName: "Publish Docker image"
|
||||
dependsOn: build_docker_image
|
||||
condition: and(succeeded(), eq(variables['Build.SourceBranchName'], 'master'))
|
||||
jobs:
|
||||
- job: publish
|
||||
displayName: "Publish"
|
||||
pool:
|
||||
vmImage: ubuntu-latest
|
||||
steps:
|
||||
- task: Docker@2
|
||||
inputs:
|
||||
containerRegistry: 'DockerHub'
|
||||
repository: 'pelletier/go-toml'
|
||||
command: 'buildAndPush'
|
||||
Dockerfile: 'Dockerfile'
|
||||
buildContext: '.'
|
||||
tags: 'latest'
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
#!/bin/bash
|
||||
|
||||
set -e
|
||||
set -ex
|
||||
|
||||
reference_ref=${1:-master}
|
||||
reference_git=${2:-.}
|
||||
|
|
@ -8,7 +8,6 @@ reference_git=${2:-.}
|
|||
if ! `hash benchstat 2>/dev/null`; then
|
||||
echo "Installing benchstat"
|
||||
go get golang.org/x/perf/cmd/benchstat
|
||||
go install golang.org/x/perf/cmd/benchstat
|
||||
fi
|
||||
|
||||
tempdir=`mktemp -d /tmp/go-toml-benchmark-XXXXXX`
|
||||
|
|
@ -29,4 +28,4 @@ go test -bench=. -benchmem | tee ${local_benchmark}
|
|||
|
||||
echo ""
|
||||
echo "=== diff"
|
||||
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
|
||||
benchstat -delta-test=none ${ref_benchmark} ${local_benchmark}
|
||||
|
|
|
|||
|
|
@ -1,7 +1,7 @@
|
|||
// Package toml is a TOML parser and manipulation library.
|
||||
//
|
||||
// This version supports the specification as described in
|
||||
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md
|
||||
// https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.5.0.md
|
||||
//
|
||||
// Marshaling
|
||||
//
|
||||
|
|
|
|||
|
|
@ -27,3 +27,4 @@ enabled = true
|
|||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
|
||||
|
|
@ -27,3 +27,4 @@ enabled = true
|
|||
|
||||
[clients]
|
||||
data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it
|
||||
score = 4e-08 # to make sure leading zeroes in exponent parts of floats are supported
|
||||
|
|
@ -0,0 +1,26 @@
|
|||
#!/bin/bash
|
||||
set -xe
|
||||
|
||||
# go-fuzz doesn't support modules yet, so ensure we do everything
|
||||
# in the old style GOPATH way
|
||||
export GO111MODULE="off"
|
||||
|
||||
# install go-fuzz
|
||||
go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-build
|
||||
|
||||
# target name can only contain lower-case letters (a-z), digits (0-9) and a dash (-)
|
||||
# to add another target, make sure to create it with `fuzzit create target`
|
||||
# before using `fuzzit create job`
|
||||
TARGET=toml-fuzzer
|
||||
|
||||
go-fuzz-build -libfuzzer -o ${TARGET}.a github.com/pelletier/go-toml
|
||||
clang -fsanitize=fuzzer ${TARGET}.a -o ${TARGET}
|
||||
|
||||
# install fuzzit for talking to fuzzit.dev service
|
||||
# or latest version:
|
||||
# https://github.com/fuzzitdev/fuzzit/releases/latest/download/fuzzit_Linux_x86_64
|
||||
wget -q -O fuzzit https://github.com/fuzzitdev/fuzzit/releases/download/v2.4.52/fuzzit_Linux_x86_64
|
||||
chmod a+x fuzzit
|
||||
|
||||
# TODO: change kkowalczyk to go-toml and create toml-fuzzer target there
|
||||
./fuzzit create job --type $TYPE go-toml/${TARGET} ${TARGET}
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
module github.com/pelletier/go-toml
|
||||
|
||||
go 1.12
|
||||
|
||||
require (
|
||||
github.com/BurntSushi/toml v0.3.1
|
||||
github.com/davecgh/go-spew v1.1.1
|
||||
gopkg.in/yaml.v2 v2.3.0
|
||||
)
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.3 h1:fvjTMHxHEw/mxHbtzPi3JCcKXQRAnQTBRo6YCJSVHKI=
|
||||
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.4 h1:/eiJrUcujPVeJ3xlSWaiNi3uSVmDGBK1pDHUHAnao1I=
|
||||
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.5 h1:ymVxjfMaHvXD8RqPRmzHHsB3VvucivSkIAvJFDI5O3c=
|
||||
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
|
|
@ -3,83 +3,110 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"unicode"
|
||||
)
|
||||
|
||||
// Convert the bare key group string to an array.
|
||||
// The input supports double quotation to allow "." inside the key name,
|
||||
// The input supports double quotation and single quotation,
|
||||
// but escape sequences are not supported. Lexers must unescape them beforehand.
|
||||
func parseKey(key string) ([]string, error) {
|
||||
groups := []string{}
|
||||
var buffer bytes.Buffer
|
||||
inQuotes := false
|
||||
wasInQuotes := false
|
||||
ignoreSpace := true
|
||||
expectDot := false
|
||||
runes := []rune(key)
|
||||
var groups []string
|
||||
|
||||
for _, char := range key {
|
||||
if ignoreSpace {
|
||||
if char == ' ' {
|
||||
continue
|
||||
}
|
||||
ignoreSpace = false
|
||||
if len(key) == 0 {
|
||||
return nil, errors.New("empty key")
|
||||
}
|
||||
|
||||
idx := 0
|
||||
for idx < len(runes) {
|
||||
for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
|
||||
// skip leading whitespace
|
||||
}
|
||||
switch char {
|
||||
case '"':
|
||||
if inQuotes {
|
||||
groups = append(groups, buffer.String())
|
||||
buffer.Reset()
|
||||
wasInQuotes = true
|
||||
}
|
||||
inQuotes = !inQuotes
|
||||
expectDot = false
|
||||
case '.':
|
||||
if inQuotes {
|
||||
buffer.WriteRune(char)
|
||||
} else {
|
||||
if !wasInQuotes {
|
||||
if buffer.Len() == 0 {
|
||||
return nil, errors.New("empty table key")
|
||||
if idx >= len(runes) {
|
||||
break
|
||||
}
|
||||
r := runes[idx]
|
||||
if isValidBareChar(r) {
|
||||
// parse bare key
|
||||
startIdx := idx
|
||||
endIdx := -1
|
||||
idx++
|
||||
for idx < len(runes) {
|
||||
r = runes[idx]
|
||||
if isValidBareChar(r) {
|
||||
idx++
|
||||
} else if r == '.' {
|
||||
endIdx = idx
|
||||
break
|
||||
} else if isSpace(r) {
|
||||
endIdx = idx
|
||||
for ; idx < len(runes) && isSpace(runes[idx]); idx++ {
|
||||
// skip trailing whitespace
|
||||
}
|
||||
groups = append(groups, buffer.String())
|
||||
buffer.Reset()
|
||||
if idx < len(runes) && runes[idx] != '.' {
|
||||
return nil, fmt.Errorf("invalid key character after whitespace: %c", runes[idx])
|
||||
}
|
||||
break
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid bare key character: %c", r)
|
||||
}
|
||||
ignoreSpace = true
|
||||
expectDot = false
|
||||
wasInQuotes = false
|
||||
}
|
||||
case ' ':
|
||||
if inQuotes {
|
||||
buffer.WriteRune(char)
|
||||
} else {
|
||||
expectDot = true
|
||||
if endIdx == -1 {
|
||||
endIdx = idx
|
||||
}
|
||||
default:
|
||||
if !inQuotes && !isValidBareChar(char) {
|
||||
return nil, fmt.Errorf("invalid bare character: %c", char)
|
||||
groups = append(groups, string(runes[startIdx:endIdx]))
|
||||
} else if r == '\'' {
|
||||
// parse single quoted key
|
||||
idx++
|
||||
startIdx := idx
|
||||
for {
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unclosed single-quoted key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if r == '\'' {
|
||||
groups = append(groups, string(runes[startIdx:idx]))
|
||||
idx++
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
if !inQuotes && expectDot {
|
||||
return nil, errors.New("what?")
|
||||
} else if r == '"' {
|
||||
// parse double quoted key
|
||||
idx++
|
||||
startIdx := idx
|
||||
for {
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unclosed double-quoted key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if r == '"' {
|
||||
groups = append(groups, string(runes[startIdx:idx]))
|
||||
idx++
|
||||
break
|
||||
}
|
||||
idx++
|
||||
}
|
||||
buffer.WriteRune(char)
|
||||
expectDot = false
|
||||
} else if r == '.' {
|
||||
idx++
|
||||
if idx >= len(runes) {
|
||||
return nil, fmt.Errorf("unexpected end of key")
|
||||
}
|
||||
r = runes[idx]
|
||||
if !isValidBareChar(r) && r != '\'' && r != '"' && r != ' ' {
|
||||
return nil, fmt.Errorf("expecting key part after dot")
|
||||
}
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid key character: %c", r)
|
||||
}
|
||||
}
|
||||
if inQuotes {
|
||||
return nil, errors.New("mismatched quotes")
|
||||
}
|
||||
if buffer.Len() > 0 {
|
||||
groups = append(groups, buffer.String())
|
||||
}
|
||||
if len(groups) == 0 {
|
||||
return nil, errors.New("empty key")
|
||||
return nil, fmt.Errorf("empty key")
|
||||
}
|
||||
return groups, nil
|
||||
}
|
||||
|
||||
func isValidBareChar(r rune) bool {
|
||||
return isAlphanumeric(r) || r == '-' || unicode.IsNumber(r)
|
||||
return isAlphanumeric(r) || r == '-' || isDigit(r)
|
||||
}
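Since parseKey is unexported, the effect of the quoted/dotted key handling above is easiest to see through the exported tree API. A minimal sketch, assuming this vendored module's import path; the sample key and value are made up for illustration:

```
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// The double-quoted part keeps its dot inside a single key segment,
	// so the document defines table "a" with one key named "b.c".
	tree, err := toml.Load(`a."b.c" = 42`)
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.GetPath([]string{"a", "b.c"})) // 42
}
```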
|
||||
|
|
|
|||
|
|
@ -26,7 +26,7 @@ type tomlLexer struct {
|
|||
currentTokenStart int
|
||||
currentTokenStop int
|
||||
tokens []token
|
||||
depth int
|
||||
brackets []rune
|
||||
line int
|
||||
col int
|
||||
endbufferLine int
|
||||
|
|
@ -123,6 +123,8 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
|||
for {
|
||||
next := l.peek()
|
||||
switch next {
|
||||
case '}': // after '{'
|
||||
return l.lexRightCurlyBrace
|
||||
case '[':
|
||||
return l.lexTableKey
|
||||
case '#':
|
||||
|
|
@ -140,10 +142,6 @@ func (l *tomlLexer) lexVoid() tomlLexStateFn {
|
|||
l.skip()
|
||||
}
|
||||
|
||||
if l.depth > 0 {
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
if isKeyStartChar(next) {
|
||||
return l.lexKey
|
||||
}
|
||||
|
|
@ -167,10 +165,8 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||
case '=':
|
||||
return l.lexEqual
|
||||
case '[':
|
||||
l.depth++
|
||||
return l.lexLeftBracket
|
||||
case ']':
|
||||
l.depth--
|
||||
return l.lexRightBracket
|
||||
case '{':
|
||||
return l.lexLeftCurlyBrace
|
||||
|
|
@ -188,12 +184,10 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||
fallthrough
|
||||
case '\n':
|
||||
l.skip()
|
||||
if l.depth == 0 {
|
||||
return l.lexVoid
|
||||
if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '[' {
|
||||
return l.lexRvalue
|
||||
}
|
||||
return l.lexRvalue
|
||||
case '_':
|
||||
return l.errorf("cannot start number with underscore")
|
||||
return l.lexVoid
|
||||
}
|
||||
|
||||
if l.follow("true") {
|
||||
|
|
@ -223,9 +217,12 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||
}
|
||||
|
||||
possibleDate := l.peekString(35)
|
||||
dateMatch := dateRegexp.FindString(possibleDate)
|
||||
if dateMatch != "" {
|
||||
l.fastForward(len(dateMatch))
|
||||
dateSubmatches := dateRegexp.FindStringSubmatch(possibleDate)
|
||||
if dateSubmatches != nil && dateSubmatches[0] != "" {
|
||||
l.fastForward(len(dateSubmatches[0]))
|
||||
if dateSubmatches[2] == "" { // no timezone information => local date
|
||||
return l.lexLocalDate
|
||||
}
|
||||
return l.lexDate
|
||||
}
|
||||
|
||||
|
|
@ -233,10 +230,6 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||
return l.lexNumber
|
||||
}
|
||||
|
||||
if isAlphanumeric(next) {
|
||||
return l.lexKey
|
||||
}
|
||||
|
||||
return l.errorf("no value can start with %c", next)
|
||||
}
|
||||
|
||||
|
|
@ -247,12 +240,17 @@ func (l *tomlLexer) lexRvalue() tomlLexStateFn {
|
|||
func (l *tomlLexer) lexLeftCurlyBrace() tomlLexStateFn {
|
||||
l.next()
|
||||
l.emit(tokenLeftCurlyBrace)
|
||||
return l.lexRvalue
|
||||
l.brackets = append(l.brackets, '{')
|
||||
return l.lexVoid
|
||||
}
|
||||
|
||||
func (l *tomlLexer) lexRightCurlyBrace() tomlLexStateFn {
|
||||
l.next()
|
||||
l.emit(tokenRightCurlyBrace)
|
||||
if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '{' {
|
||||
return l.errorf("cannot have '}' here")
|
||||
}
|
||||
l.brackets = l.brackets[:len(l.brackets)-1]
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
|
|
@ -261,6 +259,11 @@ func (l *tomlLexer) lexDate() tomlLexStateFn {
|
|||
return l.lexRvalue
|
||||
}
|
||||
|
||||
func (l *tomlLexer) lexLocalDate() tomlLexStateFn {
|
||||
l.emit(tokenLocalDate)
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
func (l *tomlLexer) lexTrue() tomlLexStateFn {
|
||||
l.fastForward(4)
|
||||
l.emit(tokenTrue)
|
||||
|
|
@ -294,6 +297,9 @@ func (l *tomlLexer) lexEqual() tomlLexStateFn {
|
|||
func (l *tomlLexer) lexComma() tomlLexStateFn {
|
||||
l.next()
|
||||
l.emit(tokenComma)
|
||||
if len(l.brackets) > 0 && l.brackets[len(l.brackets)-1] == '{' {
|
||||
return l.lexVoid
|
||||
}
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
|
|
@ -309,7 +315,7 @@ func (l *tomlLexer) lexKey() tomlLexStateFn {
|
|||
if err != nil {
|
||||
return l.errorf(err.Error())
|
||||
}
|
||||
growingString += str
|
||||
growingString += "\"" + str + "\""
|
||||
l.next()
|
||||
continue
|
||||
} else if r == '\'' {
|
||||
|
|
@ -318,13 +324,34 @@ func (l *tomlLexer) lexKey() tomlLexStateFn {
|
|||
if err != nil {
|
||||
return l.errorf(err.Error())
|
||||
}
|
||||
growingString += str
|
||||
growingString += "'" + str + "'"
|
||||
l.next()
|
||||
continue
|
||||
} else if r == '\n' {
|
||||
return l.errorf("keys cannot contain new lines")
|
||||
} else if isSpace(r) {
|
||||
break
|
||||
str := " "
|
||||
// skip trailing whitespace
|
||||
l.next()
|
||||
for r = l.peek(); isSpace(r); r = l.peek() {
|
||||
str += string(r)
|
||||
l.next()
|
||||
}
|
||||
// break loop if not a dot
|
||||
if r != '.' {
|
||||
break
|
||||
}
|
||||
str += "."
|
||||
// skip trailing whitespace after dot
|
||||
l.next()
|
||||
for r = l.peek(); isSpace(r); r = l.peek() {
|
||||
str += string(r)
|
||||
l.next()
|
||||
}
|
||||
growingString += str
|
||||
continue
|
||||
} else if r == '.' {
|
||||
// skip
|
||||
} else if !isValidBareChar(r) {
|
||||
return l.errorf("keys cannot contain %c character", r)
|
||||
}
|
||||
|
|
@ -351,6 +378,7 @@ func (l *tomlLexer) lexComment(previousState tomlLexStateFn) tomlLexStateFn {
|
|||
func (l *tomlLexer) lexLeftBracket() tomlLexStateFn {
|
||||
l.next()
|
||||
l.emit(tokenLeftBracket)
|
||||
l.brackets = append(l.brackets, '[')
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
|
|
@ -502,7 +530,7 @@ func (l *tomlLexer) lexStringAsString(terminator string, discardLeadingNewLine,
|
|||
} else {
|
||||
r := l.peek()
|
||||
|
||||
if 0x00 <= r && r <= 0x1F && !(acceptNewLines && (r == '\n' || r == '\r')) {
|
||||
if 0x00 <= r && r <= 0x1F && r != '\t' && !(acceptNewLines && (r == '\n' || r == '\r')) {
|
||||
return "", fmt.Errorf("unescaped control character %U", r)
|
||||
}
|
||||
l.next()
|
||||
|
|
@ -533,7 +561,6 @@ func (l *tomlLexer) lexString() tomlLexStateFn {
|
|||
}
|
||||
|
||||
str, err := l.lexStringAsString(terminator, discardLeadingNewLine, acceptNewLines)
|
||||
|
||||
if err != nil {
|
||||
return l.errorf(err.Error())
|
||||
}
|
||||
|
|
@ -605,6 +632,10 @@ func (l *tomlLexer) lexInsideTableKey() tomlLexStateFn {
|
|||
func (l *tomlLexer) lexRightBracket() tomlLexStateFn {
|
||||
l.next()
|
||||
l.emit(tokenRightBracket)
|
||||
if len(l.brackets) == 0 || l.brackets[len(l.brackets)-1] != '[' {
|
||||
return l.errorf("cannot have ']' here")
|
||||
}
|
||||
l.brackets = l.brackets[:len(l.brackets)-1]
|
||||
return l.lexRvalue
|
||||
}
|
||||
|
||||
|
|
@ -731,7 +762,27 @@ func (l *tomlLexer) run() {
|
|||
}
|
||||
|
||||
func init() {
|
||||
dateRegexp = regexp.MustCompile(`^\d{1,4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})`)
|
||||
// Regexp for all date/time formats supported by TOML.
|
||||
// Group 1: nano precision
|
||||
// Group 2: timezone
|
||||
//
|
||||
// /!\ also matches the empty string
|
||||
//
|
||||
// Example matches:
|
||||
//1979-05-27T07:32:00Z
|
||||
//1979-05-27T00:32:00-07:00
|
||||
//1979-05-27T00:32:00.999999-07:00
|
||||
//1979-05-27 07:32:00Z
|
||||
//1979-05-27 00:32:00-07:00
|
||||
//1979-05-27 00:32:00.999999-07:00
|
||||
//1979-05-27T07:32:00
|
||||
//1979-05-27T00:32:00.999999
|
||||
//1979-05-27 07:32:00
|
||||
//1979-05-27 00:32:00.999999
|
||||
//1979-05-27
|
||||
//07:32:00
|
||||
//00:32:00.999999
|
||||
dateRegexp = regexp.MustCompile(`^(?:\d{1,4}-\d{2}-\d{2})?(?:[T ]?\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})?)?`)
|
||||
}
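To see how the lexer tells offset date-times apart from local ones, the same pattern can be run by hand: an empty capture group 2 (the timezone) is what sends the token down the local-date path above. This standalone snippet only illustrates the regexp; it is not part of the library.

```
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as in init() above; group 1 is the fractional part,
	// group 2 the timezone.
	dateRegexp := regexp.MustCompile(`^(?:\d{1,4}-\d{2}-\d{2})?(?:[T ]?\d{2}:\d{2}:\d{2}(\.\d{1,9})?(Z|[+-]\d{2}:\d{2})?)?`)

	for _, s := range []string{"1979-05-27T07:32:00Z", "1979-05-27", "07:32:00"} {
		m := dateRegexp.FindStringSubmatch(s)
		fmt.Printf("%-22s match=%q timezone=%q\n", s, m[0], m[2])
	}
}
```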
|
||||
|
||||
// Entry point
|
||||
|
|
|
|||
|
|
@ -0,0 +1,281 @@
|
|||
// Implementation of TOML's local date/time.
|
||||
// Copied over from https://github.com/googleapis/google-cloud-go/blob/master/civil/civil.go
|
||||
// to avoid pulling all the Google dependencies.
|
||||
//
|
||||
// Copyright 2016 Google LLC
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
//
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package civil implements types for civil time, a time-zone-independent
|
||||
// representation of time that follows the rules of the proleptic
|
||||
// Gregorian calendar with exactly 24-hour days, 60-minute hours, and 60-second
|
||||
// minutes.
|
||||
//
|
||||
// Because they lack location information, these types do not represent unique
|
||||
// moments or intervals of time. Use time.Time for that purpose.
|
||||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"time"
|
||||
)
|
||||
|
||||
// A LocalDate represents a date (year, month, day).
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique 24-hour timespan.
|
||||
type LocalDate struct {
|
||||
Year int // Year (e.g., 2014).
|
||||
Month time.Month // Month of the year (January = 1, ...).
|
||||
Day int // Day of the month, starting at 1.
|
||||
}
|
||||
|
||||
// LocalDateOf returns the LocalDate in which a time occurs in that time's location.
|
||||
func LocalDateOf(t time.Time) LocalDate {
|
||||
var d LocalDate
|
||||
d.Year, d.Month, d.Day = t.Date()
|
||||
return d
|
||||
}
|
||||
|
||||
// ParseLocalDate parses a string in RFC3339 full-date format and returns the date value it represents.
|
||||
func ParseLocalDate(s string) (LocalDate, error) {
|
||||
t, err := time.Parse("2006-01-02", s)
|
||||
if err != nil {
|
||||
return LocalDate{}, err
|
||||
}
|
||||
return LocalDateOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in RFC3339 full-date format.
|
||||
func (d LocalDate) String() string {
|
||||
return fmt.Sprintf("%04d-%02d-%02d", d.Year, d.Month, d.Day)
|
||||
}
|
||||
|
||||
// IsValid reports whether the date is valid.
|
||||
func (d LocalDate) IsValid() bool {
|
||||
return LocalDateOf(d.In(time.UTC)) == d
|
||||
}
|
||||
|
||||
// In returns the time corresponding to time 00:00:00 of the date in the location.
|
||||
//
|
||||
// In is always consistent with time.LocalDate, even when time.LocalDate returns a time
|
||||
// on a different day. For example, if loc is America/Indiana/Vincennes, then both
|
||||
// time.LocalDate(1955, time.May, 1, 0, 0, 0, 0, loc)
|
||||
// and
|
||||
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}.In(loc)
|
||||
// return 23:00:00 on April 30, 1955.
|
||||
//
|
||||
// In panics if loc is nil.
|
||||
func (d LocalDate) In(loc *time.Location) time.Time {
|
||||
return time.Date(d.Year, d.Month, d.Day, 0, 0, 0, 0, loc)
|
||||
}
|
||||
|
||||
// AddDays returns the date that is n days in the future.
|
||||
// n can also be negative to go into the past.
|
||||
func (d LocalDate) AddDays(n int) LocalDate {
|
||||
return LocalDateOf(d.In(time.UTC).AddDate(0, 0, n))
|
||||
}
|
||||
|
||||
// DaysSince returns the signed number of days between the date and s, not including the end day.
|
||||
// This is the inverse operation to AddDays.
|
||||
func (d LocalDate) DaysSince(s LocalDate) (days int) {
|
||||
// We convert to Unix time so we do not have to worry about leap seconds:
|
||||
// Unix time increases by exactly 86400 seconds per day.
|
||||
deltaUnix := d.In(time.UTC).Unix() - s.In(time.UTC).Unix()
|
||||
return int(deltaUnix / 86400)
|
||||
}
|
||||
|
||||
// Before reports whether d1 occurs before d2.
|
||||
func (d1 LocalDate) Before(d2 LocalDate) bool {
|
||||
if d1.Year != d2.Year {
|
||||
return d1.Year < d2.Year
|
||||
}
|
||||
if d1.Month != d2.Month {
|
||||
return d1.Month < d2.Month
|
||||
}
|
||||
return d1.Day < d2.Day
|
||||
}
|
||||
|
||||
// After reports whether d1 occurs after d2.
|
||||
func (d1 LocalDate) After(d2 LocalDate) bool {
|
||||
return d2.Before(d1)
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of d.String().
|
||||
func (d LocalDate) MarshalText() ([]byte, error) {
|
||||
return []byte(d.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The date is expected to be a string in a format accepted by ParseLocalDate.
|
||||
func (d *LocalDate) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*d, err = ParseLocalDate(string(data))
|
||||
return err
|
||||
}
|
||||
|
||||
// A LocalTime represents a time with nanosecond precision.
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique moment in time.
|
||||
//
|
||||
// This type exists to represent the TIME type in storage-based APIs like BigQuery.
|
||||
// Most operations on Times are unlikely to be meaningful. Prefer the LocalDateTime type.
|
||||
type LocalTime struct {
|
||||
Hour int // The hour of the day in 24-hour format; range [0-23]
|
||||
Minute int // The minute of the hour; range [0-59]
|
||||
Second int // The second of the minute; range [0-59]
|
||||
Nanosecond int // The nanosecond of the second; range [0-999999999]
|
||||
}
|
||||
|
||||
// LocalTimeOf returns the LocalTime representing the time of day in which a time occurs
|
||||
// in that time's location. It ignores the date.
|
||||
func LocalTimeOf(t time.Time) LocalTime {
|
||||
var tm LocalTime
|
||||
tm.Hour, tm.Minute, tm.Second = t.Clock()
|
||||
tm.Nanosecond = t.Nanosecond()
|
||||
return tm
|
||||
}
|
||||
|
||||
// ParseLocalTime parses a string and returns the time value it represents.
|
||||
// ParseLocalTime accepts an extended form of the RFC3339 partial-time format. After
|
||||
// the HH:MM:SS part of the string, an optional fractional part may appear,
|
||||
// consisting of a decimal point followed by one to nine decimal digits.
|
||||
// (RFC3339 admits only one digit after the decimal point).
|
||||
func ParseLocalTime(s string) (LocalTime, error) {
|
||||
t, err := time.Parse("15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
return LocalTime{}, err
|
||||
}
|
||||
return LocalTimeOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in the format described in ParseLocalTime. If Nanoseconds
|
||||
// is zero, no fractional part will be generated. Otherwise, the result will
|
||||
// end with a fractional part consisting of a decimal point and nine digits.
|
||||
func (t LocalTime) String() string {
|
||||
s := fmt.Sprintf("%02d:%02d:%02d", t.Hour, t.Minute, t.Second)
|
||||
if t.Nanosecond == 0 {
|
||||
return s
|
||||
}
|
||||
return s + fmt.Sprintf(".%09d", t.Nanosecond)
|
||||
}
|
||||
|
||||
// IsValid reports whether the time is valid.
|
||||
func (t LocalTime) IsValid() bool {
|
||||
// Construct a non-zero time.
|
||||
tm := time.Date(2, 2, 2, t.Hour, t.Minute, t.Second, t.Nanosecond, time.UTC)
|
||||
return LocalTimeOf(tm) == t
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of t.String().
|
||||
func (t LocalTime) MarshalText() ([]byte, error) {
|
||||
return []byte(t.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The time is expected to be a string in a format accepted by ParseLocalTime.
|
||||
func (t *LocalTime) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*t, err = ParseLocalTime(string(data))
|
||||
return err
|
||||
}
|
||||
|
||||
// A LocalDateTime represents a date and time.
|
||||
//
|
||||
// This type does not include location information, and therefore does not
|
||||
// describe a unique moment in time.
|
||||
type LocalDateTime struct {
|
||||
Date LocalDate
|
||||
Time LocalTime
|
||||
}
|
||||
|
||||
// Note: We deliberately do not embed LocalDate into LocalDateTime, to avoid promoting AddDays and Sub.
|
||||
|
||||
// LocalDateTimeOf returns the LocalDateTime in which a time occurs in that time's location.
|
||||
func LocalDateTimeOf(t time.Time) LocalDateTime {
|
||||
return LocalDateTime{
|
||||
Date: LocalDateOf(t),
|
||||
Time: LocalTimeOf(t),
|
||||
}
|
||||
}
|
||||
|
||||
// ParseLocalDateTime parses a string and returns the LocalDateTime it represents.
|
||||
// ParseLocalDateTime accepts a variant of the RFC3339 date-time format that omits
|
||||
// the time offset but includes an optional fractional time, as described in
|
||||
// ParseLocalTime. Informally, the accepted format is
|
||||
// YYYY-MM-DDTHH:MM:SS[.FFFFFFFFF]
|
||||
// where the 'T' may be a lower-case 't'.
|
||||
func ParseLocalDateTime(s string) (LocalDateTime, error) {
|
||||
t, err := time.Parse("2006-01-02T15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
t, err = time.Parse("2006-01-02t15:04:05.999999999", s)
|
||||
if err != nil {
|
||||
return LocalDateTime{}, err
|
||||
}
|
||||
}
|
||||
return LocalDateTimeOf(t), nil
|
||||
}
|
||||
|
||||
// String returns the date in the format described in ParseLocalDate.
|
||||
func (dt LocalDateTime) String() string {
|
||||
return dt.Date.String() + "T" + dt.Time.String()
|
||||
}
|
||||
|
||||
// IsValid reports whether the datetime is valid.
|
||||
func (dt LocalDateTime) IsValid() bool {
|
||||
return dt.Date.IsValid() && dt.Time.IsValid()
|
||||
}
|
||||
|
||||
// In returns the time corresponding to the LocalDateTime in the given location.
|
||||
//
|
||||
// If the time is missing or ambiguous at the location, In returns the same
|
||||
// result as time.LocalDate. For example, if loc is America/Indiana/Vincennes, then
|
||||
// both
|
||||
// time.LocalDate(1955, time.May, 1, 0, 30, 0, 0, loc)
|
||||
// and
|
||||
// civil.LocalDateTime{
|
||||
// civil.LocalDate{Year: 1955, Month: time.May, Day: 1}},
|
||||
// civil.LocalTime{Minute: 30}}.In(loc)
|
||||
// return 23:30:00 on April 30, 1955.
|
||||
//
|
||||
// In panics if loc is nil.
|
||||
func (dt LocalDateTime) In(loc *time.Location) time.Time {
|
||||
return time.Date(dt.Date.Year, dt.Date.Month, dt.Date.Day, dt.Time.Hour, dt.Time.Minute, dt.Time.Second, dt.Time.Nanosecond, loc)
|
||||
}
|
||||
|
||||
// Before reports whether dt1 occurs before dt2.
|
||||
func (dt1 LocalDateTime) Before(dt2 LocalDateTime) bool {
|
||||
return dt1.In(time.UTC).Before(dt2.In(time.UTC))
|
||||
}
|
||||
|
||||
// After reports whether dt1 occurs after dt2.
|
||||
func (dt1 LocalDateTime) After(dt2 LocalDateTime) bool {
|
||||
return dt2.Before(dt1)
|
||||
}
|
||||
|
||||
// MarshalText implements the encoding.TextMarshaler interface.
|
||||
// The output is the result of dt.String().
|
||||
func (dt LocalDateTime) MarshalText() ([]byte, error) {
|
||||
return []byte(dt.String()), nil
|
||||
}
|
||||
|
||||
// UnmarshalText implements the encoding.TextUnmarshaler interface.
|
||||
// The datetime is expected to be a string in a format accepted by ParseLocalDateTime
|
||||
func (dt *LocalDateTime) UnmarshalText(data []byte) error {
|
||||
var err error
|
||||
*dt, err = ParseLocalDateTime(string(data))
|
||||
return err
|
||||
}
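The three local types above are exported from the toml package, so a hedged usage sketch (import path taken from this vendored module) could look like this:

```
package main

import (
	"fmt"
	"time"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// Derive the three local types from a time.Time.
	ts := time.Date(1979, time.May, 27, 7, 32, 0, 0, time.UTC)
	fmt.Println(toml.LocalDateOf(ts))     // 1979-05-27
	fmt.Println(toml.LocalTimeOf(ts))     // 07:32:00
	fmt.Println(toml.LocalDateTimeOf(ts)) // 1979-05-27T07:32:00

	// Parse a date back and anchor it to a concrete location.
	d, _ := toml.ParseLocalDate("1979-05-27")
	fmt.Println(d.In(time.UTC)) // 1979-05-27 00:00:00 +0000 UTC
}
```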
|
||||
File diff suppressed because it is too large (Load Diff)
39 vendor/github.com/pelletier/go-toml/marshal_OrderPreserve_test.toml (generated, vendored, new file)
|
|
@ -0,0 +1,39 @@
|
|||
title = "TOML Marshal Testing"
|
||||
|
||||
[basic_lists]
|
||||
floats = [12.3,45.6,78.9]
|
||||
bools = [true,false,true]
|
||||
dates = [1979-05-27T07:32:00Z,1980-05-27T07:32:00Z]
|
||||
ints = [8001,8001,8002]
|
||||
uints = [5002,5003]
|
||||
strings = ["One","Two","Three"]
|
||||
|
||||
[[subdocptrs]]
|
||||
name = "Second"
|
||||
|
||||
[basic_map]
|
||||
one = "one"
|
||||
two = "two"
|
||||
|
||||
[subdoc]
|
||||
|
||||
[subdoc.second]
|
||||
name = "Second"
|
||||
|
||||
[subdoc.first]
|
||||
name = "First"
|
||||
|
||||
[basic]
|
||||
uint = 5001
|
||||
bool = true
|
||||
float = 123.4
|
||||
float64 = 123.456782132399
|
||||
int = 5000
|
||||
string = "Bite me"
|
||||
date = 1979-05-27T07:32:00Z
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.First"
|
||||
|
||||
[[subdoclist]]
|
||||
name = "List.Second"
|
||||
|
|
@ -4,6 +4,7 @@ title = "TOML Marshal Testing"
|
|||
bool = true
|
||||
date = 1979-05-27T07:32:00Z
|
||||
float = 123.4
|
||||
float64 = 123.456782132399
|
||||
int = 5000
|
||||
string = "Bite me"
|
||||
uint = 5001
|
||||
|
|
|
|||
|
|
@ -77,8 +77,10 @@ func (p *tomlParser) parseStart() tomlParserStateFn {
|
|||
return p.parseAssign
|
||||
case tokenEOF:
|
||||
return nil
|
||||
case tokenError:
|
||||
p.raiseError(tok, "parsing error: %s", tok.String())
|
||||
default:
|
||||
p.raiseError(tok, "unexpected token")
|
||||
p.raiseError(tok, "unexpected token %s", tok.typ)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
|
@ -156,6 +158,11 @@ func (p *tomlParser) parseGroup() tomlParserStateFn {
|
|||
if err := p.tree.createSubTree(keys, startToken.Position); err != nil {
|
||||
p.raiseError(key, "%s", err)
|
||||
}
|
||||
destTree := p.tree.GetPath(keys)
|
||||
if target, ok := destTree.(*Tree); ok && target != nil && target.inline {
|
||||
p.raiseError(key, "could not re-define exist inline table or its sub-table : %s",
|
||||
strings.Join(keys, "."))
|
||||
}
|
||||
p.assume(tokenRightBracket)
|
||||
p.currentTable = keys
|
||||
return p.parseStart
|
||||
|
|
@ -165,6 +172,11 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
|
|||
key := p.getToken()
|
||||
p.assume(tokenEqual)
|
||||
|
||||
parsedKey, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid key: %s", err.Error())
|
||||
}
|
||||
|
||||
value := p.parseRvalue()
|
||||
var tableKey []string
|
||||
if len(p.currentTable) > 0 {
|
||||
|
|
@ -173,6 +185,9 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
|
|||
tableKey = []string{}
|
||||
}
|
||||
|
||||
prefixKey := parsedKey[0 : len(parsedKey)-1]
|
||||
tableKey = append(tableKey, prefixKey...)
|
||||
|
||||
// find the table to assign, looking out for arrays of tables
|
||||
var targetNode *Tree
|
||||
switch node := p.tree.GetPath(tableKey).(type) {
|
||||
|
|
@ -180,17 +195,24 @@ func (p *tomlParser) parseAssign() tomlParserStateFn {
|
|||
targetNode = node[len(node)-1]
|
||||
case *Tree:
|
||||
targetNode = node
|
||||
case nil:
|
||||
// create intermediate
|
||||
if err := p.tree.createSubTree(tableKey, key.Position); err != nil {
|
||||
p.raiseError(key, "could not create intermediate group: %s", err)
|
||||
}
|
||||
targetNode = p.tree.GetPath(tableKey).(*Tree)
|
||||
default:
|
||||
p.raiseError(key, "Unknown table type for path: %s",
|
||||
strings.Join(tableKey, "."))
|
||||
}
|
||||
|
||||
// assign value to the found table
|
||||
keyVals := []string{key.val}
|
||||
if len(keyVals) != 1 {
|
||||
p.raiseError(key, "Invalid key")
|
||||
if targetNode.inline {
|
||||
p.raiseError(key, "could not add key or sub-table to exist inline table or its sub-table : %s",
|
||||
strings.Join(tableKey, "."))
|
||||
}
|
||||
keyVal := keyVals[0]
|
||||
|
||||
// assign value to the found table
|
||||
keyVal := parsedKey[len(parsedKey)-1]
|
||||
localKey := []string{keyVal}
|
||||
finalKey := append(tableKey, keyVal)
|
||||
if targetNode.GetPath(localKey) != nil {
|
||||
|
|
@ -301,7 +323,41 @@ func (p *tomlParser) parseRvalue() interface{} {
|
|||
}
|
||||
return val
|
||||
case tokenDate:
|
||||
val, err := time.ParseInLocation(time.RFC3339Nano, tok.val, time.UTC)
|
||||
layout := time.RFC3339Nano
|
||||
if !strings.Contains(tok.val, "T") {
|
||||
layout = strings.Replace(layout, "T", " ", 1)
|
||||
}
|
||||
val, err := time.ParseInLocation(layout, tok.val, time.UTC)
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
return val
|
||||
case tokenLocalDate:
|
||||
v := strings.Replace(tok.val, " ", "T", -1)
|
||||
isDateTime := false
|
||||
isTime := false
|
||||
for _, c := range v {
|
||||
if c == 'T' || c == 't' {
|
||||
isDateTime = true
|
||||
break
|
||||
}
|
||||
if c == ':' {
|
||||
isTime = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
var val interface{}
|
||||
var err error
|
||||
|
||||
if isDateTime {
|
||||
val, err = ParseLocalDateTime(v)
|
||||
} else if isTime {
|
||||
val, err = ParseLocalTime(v)
|
||||
} else {
|
||||
val, err = ParseLocalDate(v)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
p.raiseError(tok, "%s", err)
|
||||
}
|
||||
|
|
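End to end, the isDateTime/isTime branching above maps each flavour of local value to one of the new types. A minimal sketch, assuming this vendored module's import path; the document content is made up:

```
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	tree, err := toml.LoadBytes([]byte(`
d = 1979-05-27
t = 07:32:00
dt = 1979-05-27 07:32:00`))
	if err != nil {
		panic(err)
	}
	// Each value lands in the matching local type chosen by parseRvalue.
	fmt.Printf("%T %T %T\n", tree.Get("d"), tree.Get("t"), tree.Get("dt"))
	// toml.LocalDate toml.LocalTime toml.LocalDateTime
}
```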
@ -338,18 +394,21 @@ Loop:
|
|||
case tokenRightCurlyBrace:
|
||||
p.getToken()
|
||||
break Loop
|
||||
case tokenKey:
|
||||
case tokenKey, tokenInteger, tokenString:
|
||||
if !tokenIsComma(previous) && previous != nil {
|
||||
p.raiseError(follow, "comma expected between fields in inline table")
|
||||
}
|
||||
key := p.getToken()
|
||||
p.assume(tokenEqual)
|
||||
value := p.parseRvalue()
|
||||
tree.Set(key.val, value)
|
||||
case tokenComma:
|
||||
if previous == nil {
|
||||
p.raiseError(follow, "inline table cannot start with a comma")
|
||||
|
||||
parsedKey, err := parseKey(key.val)
|
||||
if err != nil {
|
||||
p.raiseError(key, "invalid key: %s", err)
|
||||
}
|
||||
|
||||
value := p.parseRvalue()
|
||||
tree.SetPath(parsedKey, value)
|
||||
case tokenComma:
|
||||
if tokenIsComma(previous) {
|
||||
p.raiseError(follow, "need field between two commas in inline table")
|
||||
}
|
||||
|
|
@ -362,12 +421,13 @@ Loop:
|
|||
if tokenIsComma(previous) {
|
||||
p.raiseError(previous, "trailing comma at the end of inline table")
|
||||
}
|
||||
tree.inline = true
|
||||
return tree
|
||||
}
|
||||
|
||||
func (p *tomlParser) parseArray() interface{} {
|
||||
var array []interface{}
|
||||
arrayType := reflect.TypeOf(nil)
|
||||
arrayType := reflect.TypeOf(newTree())
|
||||
for {
|
||||
follow := p.peek()
|
||||
if follow == nil || follow.typ == tokenEOF {
|
||||
|
|
@ -378,11 +438,8 @@ func (p *tomlParser) parseArray() interface{} {
|
|||
break
|
||||
}
|
||||
val := p.parseRvalue()
|
||||
if arrayType == nil {
|
||||
arrayType = reflect.TypeOf(val)
|
||||
}
|
||||
if reflect.TypeOf(val) != arrayType {
|
||||
p.raiseError(follow, "mixed types in array")
|
||||
arrayType = nil
|
||||
}
|
||||
array = append(array, val)
|
||||
follow = p.peek()
|
||||
|
|
@ -396,6 +453,12 @@ func (p *tomlParser) parseArray() interface{} {
|
|||
p.getToken()
|
||||
}
|
||||
}
|
||||
|
||||
// if the array is a mixed-type array or its length is 0,
|
||||
// don't convert it to a table array
|
||||
if len(array) <= 0 {
|
||||
arrayType = nil
|
||||
}
|
||||
// An array of Trees is actually an array of inline
|
||||
// tables, which is a shorthand for a table array. If the
|
||||
// array was not converted from []interface{} to []*Tree,
|
||||
|
|
|
|||
|
|
@ -1,88 +0,0 @@
|
|||
#!/bin/bash
|
||||
# fail out of the script if anything here fails
|
||||
set -e
|
||||
set -o pipefail
|
||||
|
||||
# set the path to the present working directory
|
||||
export GOPATH=`pwd`
|
||||
|
||||
function git_clone() {
|
||||
path=$1
|
||||
branch=$2
|
||||
version=$3
|
||||
if [ ! -d "src/$path" ]; then
|
||||
mkdir -p src/$path
|
||||
git clone https://$path.git src/$path
|
||||
fi
|
||||
pushd src/$path
|
||||
git checkout "$branch"
|
||||
git reset --hard "$version"
|
||||
popd
|
||||
}
|
||||
|
||||
# Remove potential previous runs
|
||||
rm -rf src test_program_bin toml-test
|
||||
|
||||
go get github.com/pelletier/go-buffruneio
|
||||
go get github.com/davecgh/go-spew/spew
|
||||
go get gopkg.in/yaml.v2
|
||||
go get github.com/BurntSushi/toml
|
||||
|
||||
# get code for BurntSushi TOML validation
|
||||
# pinning all to 'HEAD' for version 0.3.x work (TODO: pin to commit hash when tests stabilize)
|
||||
git_clone github.com/BurntSushi/toml master HEAD
|
||||
git_clone github.com/BurntSushi/toml-test master HEAD #was: 0.2.0 HEAD
|
||||
|
||||
# build the BurntSushi test application
|
||||
go build -o toml-test github.com/BurntSushi/toml-test
|
||||
|
||||
# vendorize the current lib for testing
|
||||
# NOTE: this basically mocks an install without having to go back out to github for code
|
||||
mkdir -p src/github.com/pelletier/go-toml/cmd
|
||||
mkdir -p src/github.com/pelletier/go-toml/query
|
||||
cp *.go *.toml src/github.com/pelletier/go-toml
|
||||
cp -R cmd/* src/github.com/pelletier/go-toml/cmd
|
||||
cp -R query/* src/github.com/pelletier/go-toml/query
|
||||
go build -o test_program_bin src/github.com/pelletier/go-toml/cmd/test_program.go
|
||||
|
||||
# Run basic unit tests
|
||||
go test github.com/pelletier/go-toml -covermode=count -coverprofile=coverage.out
|
||||
go test github.com/pelletier/go-toml/cmd/tomljson
|
||||
go test github.com/pelletier/go-toml/query
|
||||
|
||||
# run the entire BurntSushi test suite
|
||||
if [[ $# -eq 0 ]] ; then
|
||||
echo "Running all BurntSushi tests"
|
||||
./toml-test ./test_program_bin | tee test_out
|
||||
else
|
||||
# run a specific test
|
||||
test=$1
|
||||
test_path='src/github.com/BurntSushi/toml-test/tests'
|
||||
valid_test="$test_path/valid/$test"
|
||||
invalid_test="$test_path/invalid/$test"
|
||||
|
||||
if [ -e "$valid_test.toml" ]; then
|
||||
echo "Valid Test TOML for $test:"
|
||||
echo "===="
|
||||
cat "$valid_test.toml"
|
||||
|
||||
echo "Valid Test JSON for $test:"
|
||||
echo "===="
|
||||
cat "$valid_test.json"
|
||||
|
||||
echo "Go-TOML Output for $test:"
|
||||
echo "===="
|
||||
cat "$valid_test.toml" | ./test_program_bin
|
||||
fi
|
||||
|
||||
if [ -e "$invalid_test.toml" ]; then
|
||||
echo "Invalid Test TOML for $test:"
|
||||
echo "===="
|
||||
cat "$invalid_test.toml"
|
||||
|
||||
echo "Go-TOML Output for $test:"
|
||||
echo "===="
|
||||
echo "go-toml Output:"
|
||||
cat "$invalid_test.toml" | ./test_program_bin
|
||||
fi
|
||||
fi
|
||||
|
|
@ -1,10 +1,6 @@
|
|||
package toml
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strconv"
|
||||
"unicode"
|
||||
)
|
||||
import "fmt"
|
||||
|
||||
// Define tokens
|
||||
type tokenType int
|
||||
|
|
@ -35,6 +31,7 @@ const (
|
|||
tokenDoubleLeftBracket
|
||||
tokenDoubleRightBracket
|
||||
tokenDate
|
||||
tokenLocalDate
|
||||
tokenKeyGroup
|
||||
tokenKeyGroupArray
|
||||
tokenComma
|
||||
|
|
@ -68,7 +65,8 @@ var tokenTypeNames = []string{
|
|||
")",
|
||||
"]]",
|
||||
"[[",
|
||||
"Date",
|
||||
"LocalDate",
|
||||
"LocalDate",
|
||||
"KeyGroup",
|
||||
"KeyGroupArray",
|
||||
",",
|
||||
|
|
@ -95,14 +93,6 @@ func (tt tokenType) String() string {
|
|||
return "Unknown"
|
||||
}
|
||||
|
||||
func (t token) Int() int {
|
||||
if result, err := strconv.Atoi(t.val); err != nil {
|
||||
panic(err)
|
||||
} else {
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
func (t token) String() string {
|
||||
switch t.typ {
|
||||
case tokenEOF:
|
||||
|
|
@ -119,7 +109,7 @@ func isSpace(r rune) bool {
|
|||
}
|
||||
|
||||
func isAlphanumeric(r rune) bool {
|
||||
return unicode.IsLetter(r) || r == '_'
|
||||
return 'a' <= r && r <= 'z' || 'A' <= r && r <= 'Z' || r == '_'
|
||||
}
|
||||
|
||||
func isKeyChar(r rune) bool {
|
||||
|
|
@ -134,7 +124,7 @@ func isKeyStartChar(r rune) bool {
|
|||
}
|
||||
|
||||
func isDigit(r rune) bool {
|
||||
return unicode.IsNumber(r)
|
||||
return '0' <= r && r <= '9'
|
||||
}
|
||||
|
||||
func isHexDigit(r rune) bool {
|
||||
|
|
|
|||
|
|
@ -23,13 +23,18 @@ type Tree struct {
|
|||
values map[string]interface{} // string -> *tomlValue, *Tree, []*Tree
|
||||
comment string
|
||||
commented bool
|
||||
inline bool
|
||||
position Position
|
||||
}
|
||||
|
||||
func newTree() *Tree {
|
||||
return newTreeWithPosition(Position{})
|
||||
}
|
||||
|
||||
func newTreeWithPosition(pos Position) *Tree {
|
||||
return &Tree{
|
||||
values: make(map[string]interface{}),
|
||||
position: Position{},
|
||||
position: pos,
|
||||
}
|
||||
}
|
||||
|
||||
|
|
@ -194,10 +199,10 @@ func (t *Tree) SetWithOptions(key string, opts SetOptions, value interface{}) {
|
|||
// formatting instructions to the key, that will be reused by Marshal().
|
||||
func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interface{}) {
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
for i, intermediateKey := range keys[:len(keys)-1] {
|
||||
nextTree, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
nextTree = newTree()
|
||||
nextTree = newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||
subtree.values[intermediateKey] = nextTree // add new element here
|
||||
}
|
||||
switch node := nextTree.(type) {
|
||||
|
|
@ -207,7 +212,7 @@ func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interfac
|
|||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
// create element if it does not exist
|
||||
subtree.values[intermediateKey] = append(node, newTree())
|
||||
subtree.values[intermediateKey] = append(node, newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col}))
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
}
|
||||
|
|
@ -215,19 +220,25 @@ func (t *Tree) SetPathWithOptions(keys []string, opts SetOptions, value interfac
|
|||
|
||||
var toInsert interface{}
|
||||
|
||||
switch value.(type) {
|
||||
switch v := value.(type) {
|
||||
case *Tree:
|
||||
tt := value.(*Tree)
|
||||
tt.comment = opts.Comment
|
||||
v.comment = opts.Comment
|
||||
v.commented = opts.Commented
|
||||
toInsert = value
|
||||
case []*Tree:
|
||||
for i := range v {
|
||||
v[i].commented = opts.Commented
|
||||
}
|
||||
toInsert = value
|
||||
case *tomlValue:
|
||||
tt := value.(*tomlValue)
|
||||
tt.comment = opts.Comment
|
||||
toInsert = tt
|
||||
v.comment = opts.Comment
|
||||
toInsert = v
|
||||
default:
|
||||
toInsert = &tomlValue{value: value, comment: opts.Comment, commented: opts.Commented, multiline: opts.Multiline}
|
||||
toInsert = &tomlValue{value: value,
|
||||
comment: opts.Comment,
|
||||
commented: opts.Commented,
|
||||
multiline: opts.Multiline,
|
||||
position: Position{Line: subtree.position.Line + len(subtree.values) + 1, Col: subtree.position.Col}}
|
||||
}
|
||||
|
||||
subtree.values[keys[len(keys)-1]] = toInsert
|
||||
|
|
@ -256,44 +267,35 @@ func (t *Tree) SetPath(keys []string, value interface{}) {
|
|||
// SetPathWithComment is the same as SetPath, but allows you to provide comment
|
||||
// information to the key, that will be reused by Marshal().
|
||||
func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool, value interface{}) {
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys[:len(keys)-1] {
|
||||
nextTree, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
nextTree = newTree()
|
||||
subtree.values[intermediateKey] = nextTree // add new element here
|
||||
}
|
||||
switch node := nextTree.(type) {
|
||||
case *Tree:
|
||||
subtree = node
|
||||
case []*Tree:
|
||||
// go to most recent element
|
||||
if len(node) == 0 {
|
||||
// create element if it does not exist
|
||||
subtree.values[intermediateKey] = append(node, newTree())
|
||||
}
|
||||
subtree = node[len(node)-1]
|
||||
}
|
||||
t.SetPathWithOptions(keys, SetOptions{Comment: comment, Commented: commented}, value)
|
||||
}
|
||||
|
||||
// Delete removes a key from the tree.
|
||||
// Key is a dot-separated path (e.g. a.b.c).
|
||||
func (t *Tree) Delete(key string) error {
|
||||
keys, err := parseKey(key)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return t.DeletePath(keys)
|
||||
}
|
||||
|
||||
var toInsert interface{}
|
||||
|
||||
switch value.(type) {
|
||||
// DeletePath removes a key from the tree.
|
||||
// Keys is an array of path elements (e.g. {"a","b","c"}).
|
||||
func (t *Tree) DeletePath(keys []string) error {
|
||||
keyLen := len(keys)
|
||||
if keyLen == 1 {
|
||||
delete(t.values, keys[0])
|
||||
return nil
|
||||
}
|
||||
tree := t.GetPath(keys[:keyLen-1])
|
||||
item := keys[keyLen-1]
|
||||
switch node := tree.(type) {
|
||||
case *Tree:
|
||||
tt := value.(*Tree)
|
||||
tt.comment = comment
|
||||
toInsert = value
|
||||
case []*Tree:
|
||||
toInsert = value
|
||||
case *tomlValue:
|
||||
tt := value.(*tomlValue)
|
||||
tt.comment = comment
|
||||
toInsert = tt
|
||||
default:
|
||||
toInsert = &tomlValue{value: value, comment: comment, commented: commented}
|
||||
delete(node.values, item)
|
||||
return nil
|
||||
}
|
||||
|
||||
subtree.values[keys[len(keys)-1]] = toInsert
|
||||
return errors.New("no such key to delete")
|
||||
}
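A short usage sketch of the new Delete/DeletePath API, assuming this vendored module's import path (the document is made up for illustration):

```
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	tree, _ := toml.Load(`
[a.b]
c = 1
d = 2`)

	// Delete takes a dot-separated key; DeletePath takes the path elements.
	if err := tree.Delete("a.b.c"); err != nil {
		panic(err)
	}
	fmt.Println(tree.Has("a.b.c"), tree.Has("a.b.d")) // false true
}
```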
|
||||
|
||||
// createSubTree takes a tree and a key and create the necessary intermediate
|
||||
|
|
@ -305,11 +307,12 @@ func (t *Tree) SetPathWithComment(keys []string, comment string, commented bool,
|
|||
// Returns nil on success, error object on failure
|
||||
func (t *Tree) createSubTree(keys []string, pos Position) error {
|
||||
subtree := t
|
||||
for _, intermediateKey := range keys {
|
||||
for i, intermediateKey := range keys {
|
||||
nextTree, exists := subtree.values[intermediateKey]
|
||||
if !exists {
|
||||
tree := newTree()
|
||||
tree := newTreeWithPosition(Position{Line: t.position.Line + i, Col: t.position.Col})
|
||||
tree.position = pos
|
||||
tree.inline = subtree.inline
|
||||
subtree.values[intermediateKey] = tree
|
||||
nextTree = tree
|
||||
}
|
||||
|
|
@ -337,10 +340,39 @@ func LoadBytes(b []byte) (tree *Tree, err error) {
|
|||
err = errors.New(r.(string))
|
||||
}
|
||||
}()
|
||||
|
||||
if len(b) >= 4 && (hasUTF32BigEndianBOM4(b) || hasUTF32LittleEndianBOM4(b)) {
|
||||
b = b[4:]
|
||||
} else if len(b) >= 3 && hasUTF8BOM3(b) {
|
||||
b = b[3:]
|
||||
} else if len(b) >= 2 && (hasUTF16BigEndianBOM2(b) || hasUTF16LittleEndianBOM2(b)) {
|
||||
b = b[2:]
|
||||
}
|
||||
|
||||
tree = parseToml(lexToml(b))
|
||||
return
|
||||
}
|
||||
|
||||
func hasUTF16BigEndianBOM2(b []byte) bool {
|
||||
return b[0] == 0xFE && b[1] == 0xFF
|
||||
}
|
||||
|
||||
func hasUTF16LittleEndianBOM2(b []byte) bool {
|
||||
return b[0] == 0xFF && b[1] == 0xFE
|
||||
}
|
||||
|
||||
func hasUTF8BOM3(b []byte) bool {
|
||||
return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF
|
||||
}
|
||||
|
||||
func hasUTF32BigEndianBOM4(b []byte) bool {
|
||||
return b[0] == 0x00 && b[1] == 0x00 && b[2] == 0xFE && b[3] == 0xFF
|
||||
}
|
||||
|
||||
func hasUTF32LittleEndianBOM4(b []byte) bool {
|
||||
return b[0] == 0xFF && b[1] == 0xFE && b[2] == 0x00 && b[3] == 0x00
|
||||
}
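With the BOM stripping above, a document prefixed with a byte order mark now loads cleanly. A minimal sketch for the UTF-8 case (the UTF-16/32 branches only strip the marker; the remaining bytes must still be valid input):

```
package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// 0xEF 0xBB 0xBF is the UTF-8 byte order mark.
	doc := append([]byte{0xEF, 0xBB, 0xBF}, []byte(`answer = 42`)...)
	tree, err := toml.LoadBytes(doc)
	if err != nil {
		panic(err)
	}
	fmt.Println(tree.Get("answer")) // 42
}
```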
|
||||
|
||||
// LoadReader creates a Tree from any io.Reader.
|
||||
func LoadReader(reader io.Reader) (tree *Tree, err error) {
|
||||
inputBytes, err := ioutil.ReadAll(reader)
|
||||
|
|
|
|||
|
|
@ -5,6 +5,7 @@ import (
|
|||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"math/big"
|
||||
"reflect"
|
||||
"sort"
|
||||
"strconv"
|
||||
|
|
@ -12,26 +13,50 @@ import (
|
|||
"time"
|
||||
)
|
||||
|
||||
type valueComplexity int
|
||||
|
||||
const (
|
||||
valueSimple valueComplexity = iota + 1
|
||||
valueComplex
|
||||
)
|
||||
|
||||
type sortNode struct {
|
||||
key string
|
||||
complexity valueComplexity
|
||||
}
|
||||
|
||||
// Encodes a string to a TOML-compliant multi-line string value
|
||||
// This function is a clone of the existing encodeTomlString function, except that whitespace characters
|
||||
// are preserved. Quotation marks and backslashes are also not escaped.
|
||||
func encodeMultilineTomlString(value string) string {
|
||||
func encodeMultilineTomlString(value string, commented string) string {
|
||||
var b bytes.Buffer
|
||||
adjacentQuoteCount := 0
|
||||
|
||||
for _, rr := range value {
|
||||
b.WriteString(commented)
|
||||
for i, rr := range value {
|
||||
if rr != '"' {
|
||||
adjacentQuoteCount = 0
|
||||
} else {
|
||||
adjacentQuoteCount++
|
||||
}
|
||||
switch rr {
|
||||
case '\b':
|
||||
b.WriteString(`\b`)
|
||||
case '\t':
|
||||
b.WriteString("\t")
|
||||
case '\n':
|
||||
b.WriteString("\n")
|
||||
b.WriteString("\n" + commented)
|
||||
case '\f':
|
||||
b.WriteString(`\f`)
|
||||
case '\r':
|
||||
b.WriteString("\r")
|
||||
case '"':
|
||||
b.WriteString(`"`)
|
||||
if adjacentQuoteCount >= 3 || i == len(value)-1 {
|
||||
adjacentQuoteCount = 0
|
||||
b.WriteString(`\"`)
|
||||
} else {
|
||||
b.WriteString(`"`)
|
||||
}
|
||||
case '\\':
|
||||
b.WriteString(`\`)
|
||||
default:
|
||||
|
|
@ -78,7 +103,30 @@ func encodeTomlString(value string) string {
|
|||
return b.String()
|
||||
}
|
||||
|
||||
func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElementPerLine bool) (string, error) {
|
||||
func tomlTreeStringRepresentation(t *Tree, ord marshalOrder) (string, error) {
|
||||
var orderedVals []sortNode
|
||||
switch ord {
|
||||
case OrderPreserve:
|
||||
orderedVals = sortByLines(t)
|
||||
default:
|
||||
orderedVals = sortAlphabetical(t)
|
||||
}
|
||||
|
||||
var values []string
|
||||
for _, node := range orderedVals {
|
||||
k := node.key
|
||||
v := t.values[k]
|
||||
|
||||
repr, err := tomlValueStringRepresentation(v, "", "", ord, false)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
values = append(values, quoteKeyIfNeeded(k)+" = "+repr)
|
||||
}
|
||||
return "{ " + strings.Join(values, ", ") + " }", nil
|
||||
}
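The new ord parameter threads the encoder's ordering choice down into value rendering. How that surfaces publicly is sketched below; Encoder.Order is assumed from the wider go-toml API and is not shown in this excerpt, so treat this as illustrative only:

```
package main

import (
	"bytes"
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	type Config struct {
		Zebra string `toml:"zebra"`
		Apple string `toml:"apple"`
	}

	var buf bytes.Buffer
	// OrderPreserve keeps the original field order instead of sorting keys.
	enc := toml.NewEncoder(&buf).Order(toml.OrderPreserve)
	if err := enc.Encode(Config{Zebra: "z", Apple: "a"}); err != nil {
		panic(err)
	}
	fmt.Print(buf.String())
}
```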
|
||||
|
||||
func tomlValueStringRepresentation(v interface{}, commented string, indent string, ord marshalOrder, arraysOneElementPerLine bool) (string, error) {
|
||||
// this interface check is added to dereference the change made in the writeTo function.
|
||||
// That change was made to allow this function to see formatting options.
|
||||
tv, ok := v.(*tomlValue)
|
||||
|
|
@ -94,20 +142,28 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
|
|||
case int64:
return strconv.FormatInt(value, 10), nil
case float64:
// Ensure a round float does contain a decimal point. Otherwise feeding
// the output back to the parser would convert to an integer.
if math.Trunc(value) == value {
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, 32)), nil
// Default bit length is full 64
bits := 64
// Float panics if nan is used
if !math.IsNaN(value) {
// if 32 bit accuracy is enough to exactly show, use 32
_, acc := big.NewFloat(value).Float32()
if acc == big.Exact {
bits = 32
}
}
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, 32)), nil
if math.Trunc(value) == value {
return strings.ToLower(strconv.FormatFloat(value, 'f', 1, bits)), nil
}
return strings.ToLower(strconv.FormatFloat(value, 'f', -1, bits)), nil
case string:
if tv.multiline {
return "\"\"\"\n" + encodeMultilineTomlString(value) + "\"\"\"", nil
return "\"\"\"\n" + encodeMultilineTomlString(value, commented) + "\"\"\"", nil
}
return "\"" + encodeTomlString(value) + "\"", nil
case []byte:
b, _ := v.([]byte)
return tomlValueStringRepresentation(string(b), indent, arraysOneElementPerLine)
return tomlValueStringRepresentation(string(b), commented, indent, ord, arraysOneElementPerLine)
case bool:
if value {
return "true", nil

@ -115,6 +171,14 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
return "false", nil
case time.Time:
return value.Format(time.RFC3339), nil
case LocalDate:
return value.String(), nil
case LocalDateTime:
return value.String(), nil
case LocalTime:
return value.String(), nil
case *Tree:
return tomlTreeStringRepresentation(value, ord)
case nil:
return "", nil
}

@ -125,7 +189,7 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen
var values []string
for i := 0; i < rv.Len(); i++ {
item := rv.Index(i).Interface()
itemRepr, err := tomlValueStringRepresentation(item, indent, arraysOneElementPerLine)
itemRepr, err := tomlValueStringRepresentation(item, commented, indent, ord, arraysOneElementPerLine)
if err != nil {
return "", err
}

@ -139,131 +203,252 @@ func tomlValueStringRepresentation(v interface{}, indent string, arraysOneElemen

for _, value := range values {
stringBuffer.WriteString(valueIndent)
stringBuffer.WriteString(value)
stringBuffer.WriteString(commented + value)
stringBuffer.WriteString(`,`)
stringBuffer.WriteString("\n")
}

stringBuffer.WriteString(indent + "]")
stringBuffer.WriteString(indent + commented + "]")

return stringBuffer.String(), nil
}
return "[" + strings.Join(values, ",") + "]", nil
return "[" + strings.Join(values, ", ") + "]", nil
}
return "", fmt.Errorf("unsupported value type %T: %v", v, v)
}

func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
simpleValuesKeys := make([]string, 0)
complexValuesKeys := make([]string, 0)
func getTreeArrayLine(trees []*Tree) (line int) {
// get lowest line number that is not 0
for _, tv := range trees {
if tv.position.Line < line || line == 0 {
line = tv.position.Line
}
}
return
}

func sortByLines(t *Tree) (vals []sortNode) {
var (
line int
lines []int
tv *Tree
tom *tomlValue
node sortNode
)
vals = make([]sortNode, 0)
m := make(map[int]sortNode)

for k := range t.values {
v := t.values[k]
switch v.(type) {
case *Tree:
tv = v.(*Tree)
line = tv.position.Line
node = sortNode{key: k, complexity: valueComplex}
case []*Tree:
line = getTreeArrayLine(v.([]*Tree))
node = sortNode{key: k, complexity: valueComplex}
default:
tom = v.(*tomlValue)
line = tom.position.Line
node = sortNode{key: k, complexity: valueSimple}
}
lines = append(lines, line)
vals = append(vals, node)
m[line] = node
}
sort.Ints(lines)

for i, line := range lines {
vals[i] = m[line]
}

return vals
}

func sortAlphabetical(t *Tree) (vals []sortNode) {
var (
node sortNode
simpVals []string
compVals []string
)
vals = make([]sortNode, 0)
m := make(map[string]sortNode)

for k := range t.values {
v := t.values[k]
switch v.(type) {
case *Tree, []*Tree:
complexValuesKeys = append(complexValuesKeys, k)
node = sortNode{key: k, complexity: valueComplex}
compVals = append(compVals, node.key)
default:
simpleValuesKeys = append(simpleValuesKeys, k)
node = sortNode{key: k, complexity: valueSimple}
simpVals = append(simpVals, node.key)
}
vals = append(vals, node)
m[node.key] = node
}

sort.Strings(simpleValuesKeys)
sort.Strings(complexValuesKeys)

for _, k := range simpleValuesKeys {
v, ok := t.values[k].(*tomlValue)
if !ok {
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
}

repr, err := tomlValueStringRepresentation(v, indent, arraysOneElementPerLine)
if err != nil {
return bytesCount, err
}

if v.comment != "" {
comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1)
start := "# "
if strings.HasPrefix(comment, "#") {
start = ""
}
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n")
bytesCount += int64(writtenBytesCountComment)
if errc != nil {
return bytesCount, errc
}
}

var commented string
if v.commented {
commented = "# "
}
writtenBytesCount, err := writeStrings(w, indent, commented, k, " = ", repr, "\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
}
// Simples first to match previous implementation
sort.Strings(simpVals)
i := 0
for _, key := range simpVals {
vals[i] = m[key]
i++
}

for _, k := range complexValuesKeys {
v := t.values[k]
sort.Strings(compVals)
for _, key := range compVals {
vals[i] = m[key]
i++
}

combinedKey := k
if keyspace != "" {
combinedKey = keyspace + "." + combinedKey
}
var commented string
if t.commented {
commented = "# "
}
return vals
}

switch node := v.(type) {
// node has to be of those two types given how keys are sorted above
case *Tree:
tv, ok := t.values[k].(*Tree)
func (t *Tree) writeTo(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool) (int64, error) {
return t.writeToOrdered(w, indent, keyspace, bytesCount, arraysOneElementPerLine, OrderAlphabetical, " ", false)
}

func (t *Tree) writeToOrdered(w io.Writer, indent, keyspace string, bytesCount int64, arraysOneElementPerLine bool, ord marshalOrder, indentString string, parentCommented bool) (int64, error) {
var orderedVals []sortNode

switch ord {
case OrderPreserve:
orderedVals = sortByLines(t)
default:
orderedVals = sortAlphabetical(t)
}

for _, node := range orderedVals {
switch node.complexity {
case valueComplex:
k := node.key
v := t.values[k]

combinedKey := quoteKeyIfNeeded(k)
if keyspace != "" {
combinedKey = keyspace + "." + combinedKey
}

switch node := v.(type) {
// node has to be of those two types given how keys are sorted above
case *Tree:
tv, ok := t.values[k].(*Tree)
if !ok {
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
}
if tv.comment != "" {
comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1)
start := "# "
if strings.HasPrefix(comment, "#") {
start = ""
}
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment)
bytesCount += int64(writtenBytesCountComment)
if errc != nil {
return bytesCount, errc
}
}

var commented string
if parentCommented || t.commented || tv.commented {
commented = "# "
}
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
}
bytesCount, err = node.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, parentCommented || t.commented || tv.commented)
if err != nil {
return bytesCount, err
}
case []*Tree:
for _, subTree := range node {
var commented string
if parentCommented || t.commented || subTree.commented {
commented = "# "
}
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
}

bytesCount, err = subTree.writeToOrdered(w, indent+indentString, combinedKey, bytesCount, arraysOneElementPerLine, ord, indentString, parentCommented || t.commented || subTree.commented)
if err != nil {
return bytesCount, err
}
}
}
default: // Simple
k := node.key
v, ok := t.values[k].(*tomlValue)
if !ok {
return bytesCount, fmt.Errorf("invalid value type at %s: %T", k, t.values[k])
}
if tv.comment != "" {
comment := strings.Replace(tv.comment, "\n", "\n"+indent+"#", -1)

var commented string
if parentCommented || t.commented || v.commented {
commented = "# "
}
repr, err := tomlValueStringRepresentation(v, commented, indent, ord, arraysOneElementPerLine)
if err != nil {
return bytesCount, err
}

if v.comment != "" {
comment := strings.Replace(v.comment, "\n", "\n"+indent+"#", -1)
start := "# "
if strings.HasPrefix(comment, "#") {
start = ""
}
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment)
writtenBytesCountComment, errc := writeStrings(w, "\n", indent, start, comment, "\n")
bytesCount += int64(writtenBytesCountComment)
if errc != nil {
return bytesCount, errc
}
}
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[", combinedKey, "]\n")

quotedKey := quoteKeyIfNeeded(k)
writtenBytesCount, err := writeStrings(w, indent, commented, quotedKey, " = ", repr, "\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
}
bytesCount, err = node.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine)
if err != nil {
return bytesCount, err
}
case []*Tree:
for _, subTree := range node {
writtenBytesCount, err := writeStrings(w, "\n", indent, commented, "[[", combinedKey, "]]\n")
bytesCount += int64(writtenBytesCount)
if err != nil {
return bytesCount, err
}

bytesCount, err = subTree.writeTo(w, indent+" ", combinedKey, bytesCount, arraysOneElementPerLine)
if err != nil {
return bytesCount, err
}
}
}
}

return bytesCount, nil
}

// quote a key if it does not fit the bare key format (A-Za-z0-9_-)
// quoted keys use the same rules as strings
func quoteKeyIfNeeded(k string) string {
// when encoding a map with the 'quoteMapKeys' option enabled, the tree will contain
// keys that have already been quoted.
// not an ideal situation, but good enough of a stop gap.
if len(k) >= 2 && k[0] == '"' && k[len(k)-1] == '"' {
return k
}
isBare := true
for _, r := range k {
if !isValidBareChar(r) {
isBare = false
break
}
}
if isBare {
return k
}
return quoteKey(k)
}

func quoteKey(k string) string {
return "\"" + encodeTomlString(k) + "\""
}

func writeStrings(w io.Writer, s ...string) (int, error) {
var n int
for i := range s {

@ -286,12 +471,11 @@ func (t *Tree) WriteTo(w io.Writer) (int64, error) {
// Output spans multiple lines, and is suitable for ingest by a TOML parser.
// If the conversion cannot be performed, ToString returns a non-nil error.
func (t *Tree) ToTomlString() (string, error) {
var buf bytes.Buffer
_, err := t.WriteTo(&buf)
b, err := t.Marshal()
if err != nil {
return "", err
}
return buf.String(), nil
return string(b), nil
}

// String generates a human-readable representation of the current tree.
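
The writer changes above (line-order vs. alphabetical sorting, key quoting, and ToTomlString delegating to Marshal) appear to belong to the vendored go-toml library (github.com/pelletier/go-toml). As a rough, hedged sketch only, assuming the go-toml v1 public API of toml.Load and Tree.ToTomlString and using a made-up config document, a round trip exercises this code path:

package main

import (
	"fmt"

	toml "github.com/pelletier/go-toml"
)

func main() {
	// Hypothetical input: a quoted key (it contains a dot, so it cannot be
	// a bare key) and a round float, both of which the writer above has to
	// reproduce faithfully.
	doc := `
[server]
"listen.address" = "0.0.0.0:8080"
timeout = 30.0
`
	tree, err := toml.Load(doc)
	if err != nil {
		panic(err)
	}

	// ToTomlString now goes through Marshal (see the hunk above), so the
	// re-serialized document comes out of the ordered writer.
	out, err := tree.ToTomlString()
	if err != nil {
		panic(err)
	}
	fmt.Print(out)
}

Whether keys come back in source order or alphabetically depends on the marshalOrder threaded through writeToOrdered (OrderPreserve sorts by recorded line numbers, the default sorts alphabetically); the encoder also appears to expose this as an option, but that is an assumption, not something shown in this hunk.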

@ -1,15 +1,10 @@
language: go
go_import_path: github.com/pkg/errors
go:
- 1.4.x
- 1.5.x
- 1.6.x
- 1.7.x
- 1.8.x
- 1.9.x
- 1.10.x
- 1.11.x
- 1.12.x
- 1.13.x
- tip

script:
- go test -v ./...
- make check

@ -0,0 +1,44 @@
PKGS := github.com/pkg/errors
SRCDIRS := $(shell go list -f '{{.Dir}}' $(PKGS))
GO := go

check: test vet gofmt misspell unconvert staticcheck ineffassign unparam

test:
$(GO) test $(PKGS)

vet: | test
$(GO) vet $(PKGS)

staticcheck:
$(GO) get honnef.co/go/tools/cmd/staticcheck
staticcheck -checks all $(PKGS)

misspell:
$(GO) get github.com/client9/misspell/cmd/misspell
misspell \
-locale GB \
-error \
*.md *.go

unconvert:
$(GO) get github.com/mdempsky/unconvert
unconvert -v $(PKGS)

ineffassign:
$(GO) get github.com/gordonklaus/ineffassign
find $(SRCDIRS) -name '*.go' | xargs ineffassign

pedantic: check errcheck

unparam:
$(GO) get mvdan.cc/unparam
unparam ./...

errcheck:
$(GO) get github.com/kisielk/errcheck
errcheck $(PKGS)

gofmt:
@echo Checking code is gofmted
@test -z "$(shell gofmt -s -l -d -e $(SRCDIRS) | tee /dev/stderr)"

@ -41,11 +41,18 @@ default:

[Read the package documentation for more information](https://godoc.org/github.com/pkg/errors).

## Roadmap

With the upcoming [Go2 error proposals](https://go.googlesource.com/proposal/+/master/design/go2draft.md) this package is moving into maintenance mode. The roadmap for a 1.0 release is as follows:

- 0.9. Remove pre Go 1.9 and Go 1.10 support, address outstanding pull requests (if possible)
- 1.0. Final release.

## Contributing

We welcome pull requests, bug fixes and issue reports. With that said, the bar for adding new symbols to this package is intentionally set high.
Because of the Go2 errors changes, this package is not accepting proposals for new functionality. With that said, we welcome pull requests, bug fixes and issue reports.

Before proposing a change, please discuss your change by raising an issue.
Before sending a PR, please discuss your change by raising an issue.

## License

@ -82,7 +82,7 @@
//
// if err, ok := err.(stackTracer); ok {
// for _, f := range err.StackTrace() {
// fmt.Printf("%+s:%d", f)
// fmt.Printf("%+s:%d\n", f, f)
// }
// }
//
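
The corrected documentation example above passes the frame twice, once for %+s and once for %d. A self-contained sketch of that pattern; note that stackTracer is not exported by the package, so the local interface declaration below is the caller's own, exactly as the docs suggest:

package main

import (
	"fmt"

	"github.com/pkg/errors"
)

// stackTracer is declared by the caller, mirroring the package documentation;
// it is not an exported type of github.com/pkg/errors.
type stackTracer interface {
	StackTrace() errors.StackTrace
}

func main() {
	err := errors.New("whoops")
	if st, ok := err.(stackTracer); ok {
		for _, f := range st.StackTrace() {
			// %+s prints "function\n\tfile", %d prints the line number.
			fmt.Printf("%+s:%d\n", f, f)
		}
	}
}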

@ -159,6 +159,9 @@ type withStack struct {

func (w *withStack) Cause() error { return w.error }

// Unwrap provides compatibility for Go 1.13 error chains.
func (w *withStack) Unwrap() error { return w.error }

func (w *withStack) Format(s fmt.State, verb rune) {
switch verb {
case 'v':

@ -241,6 +244,9 @@ type withMessage struct {
func (w *withMessage) Error() string { return w.msg + ": " + w.cause.Error() }
func (w *withMessage) Cause() error { return w.cause }

// Unwrap provides compatibility for Go 1.13 error chains.
func (w *withMessage) Unwrap() error { return w.cause }

func (w *withMessage) Format(s fmt.State, verb rune) {
switch verb {
case 'v':

@ -0,0 +1,38 @@
// +build go1.13

package errors

import (
stderrors "errors"
)

// Is reports whether any error in err's chain matches target.
//
// The chain consists of err itself followed by the sequence of errors obtained by
// repeatedly calling Unwrap.
//
// An error is considered to match a target if it is equal to that target or if
// it implements a method Is(error) bool such that Is(target) returns true.
func Is(err, target error) bool { return stderrors.Is(err, target) }

// As finds the first error in err's chain that matches target, and if so, sets
// target to that error value and returns true.
//
// The chain consists of err itself followed by the sequence of errors obtained by
// repeatedly calling Unwrap.
//
// An error matches target if the error's concrete value is assignable to the value
// pointed to by target, or if the error has a method As(interface{}) bool such that
// As(target) returns true. In the latter case, the As method is responsible for
// setting target.
//
// As will panic if target is not a non-nil pointer to either a type that implements
// error, or to any interface type. As returns false if err is nil.
func As(err error, target interface{}) bool { return stderrors.As(err, target) }

// Unwrap returns the result of calling the Unwrap method on err, if err's
// type contains an Unwrap method returning error.
// Otherwise, Unwrap returns nil.
func Unwrap(err error) error {
return stderrors.Unwrap(err)
}
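
Together with the Unwrap methods added to withStack and withMessage above, these Go 1.13 forwarders let errors wrapped by this package participate in standard errors.Is and errors.As matching. A minimal sketch of that interop; the file path is deliberately nonexistent so os.Open fails:

package main

import (
	"fmt"
	"os"

	"github.com/pkg/errors"
)

func main() {
	_, openErr := os.Open("/path/that/does/not/exist")
	wrapped := errors.Wrap(openErr, "loading config")

	// Is walks the chain via the Unwrap methods added above and finds the
	// underlying "file does not exist" sentinel.
	fmt.Println(errors.Is(wrapped, os.ErrNotExist)) // true

	// The pre-1.13 Cause helper still works alongside it.
	fmt.Println(errors.Cause(wrapped) == openErr) // true
}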

@ -5,10 +5,13 @@ import (
"io"
"path"
"runtime"
"strconv"
"strings"
)

// Frame represents a program counter inside a stack frame.
// For historical reasons if Frame is interpreted as a uintptr
// its value represents the program counter + 1.
type Frame uintptr

// pc returns the program counter for this frame;

@ -37,6 +40,15 @@ func (f Frame) line() int {
return line
}

// name returns the name of this function, if known.
func (f Frame) name() string {
fn := runtime.FuncForPC(f.pc())
if fn == nil {
return "unknown"
}
return fn.Name()
}

// Format formats the frame according to the fmt.Formatter interface.
//
// %s source file

@ -54,22 +66,16 @@ func (f Frame) Format(s fmt.State, verb rune) {
case 's':
switch {
case s.Flag('+'):
pc := f.pc()
fn := runtime.FuncForPC(pc)
if fn == nil {
io.WriteString(s, "unknown")
} else {
file, _ := fn.FileLine(pc)
fmt.Fprintf(s, "%s\n\t%s", fn.Name(), file)
}
io.WriteString(s, f.name())
io.WriteString(s, "\n\t")
io.WriteString(s, f.file())
default:
io.WriteString(s, path.Base(f.file()))
}
case 'd':
fmt.Fprintf(s, "%d", f.line())
io.WriteString(s, strconv.Itoa(f.line()))
case 'n':
name := runtime.FuncForPC(f.pc()).Name()
io.WriteString(s, funcname(name))
io.WriteString(s, funcname(f.name()))
case 'v':
f.Format(s, 's')
io.WriteString(s, ":")

@ -77,6 +83,16 @@ func (f Frame) Format(s fmt.State, verb rune) {
}
}

// MarshalText formats a stacktrace Frame as a text string. The output is the
// same as that of fmt.Sprintf("%+v", f), but without newlines or tabs.
func (f Frame) MarshalText() ([]byte, error) {
name := f.name()
if name == "unknown" {
return []byte(name), nil
}
return []byte(fmt.Sprintf("%s %s:%d", name, f.file(), f.line())), nil
}

// StackTrace is stack of Frames from innermost (newest) to outermost (oldest).
type StackTrace []Frame

@ -94,18 +110,32 @@ func (st StackTrace) Format(s fmt.State, verb rune) {
switch {
case s.Flag('+'):
for _, f := range st {
fmt.Fprintf(s, "\n%+v", f)
io.WriteString(s, "\n")
f.Format(s, verb)
}
case s.Flag('#'):
fmt.Fprintf(s, "%#v", []Frame(st))
default:
fmt.Fprintf(s, "%v", []Frame(st))
st.formatSlice(s, verb)
}
case 's':
fmt.Fprintf(s, "%s", []Frame(st))
st.formatSlice(s, verb)
}
}

// formatSlice will format this StackTrace into the given buffer as a slice of
// Frame, only valid when called with '%s' or '%v'.
func (st StackTrace) formatSlice(s fmt.State, verb rune) {
io.WriteString(s, "[")
for i, f := range st {
if i > 0 {
io.WriteString(s, " ")
}
f.Format(s, verb)
}
io.WriteString(s, "]")
}

// stack represents a stack of program counters.
type stack []uintptr
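
Frame.MarshalText above makes frames satisfy encoding.TextMarshaler, so encoders such as encoding/json render a stack trace as readable "function file:line" strings instead of raw program counters. A small sketch, again declaring stackTracer locally because the package does not export it; the path in the comment is illustrative only:

package main

import (
	"encoding/json"
	"fmt"

	"github.com/pkg/errors"
)

// stackTracer is the caller-side interface described in the package docs.
type stackTracer interface {
	StackTrace() errors.StackTrace
}

func main() {
	err := errors.New("boom")
	frames := err.(stackTracer).StackTrace()

	// encoding/json picks up Frame.MarshalText, so each frame becomes a
	// JSON string along the lines of "main.main /tmp/sketch/main.go:17".
	out, jsonErr := json.MarshalIndent(frames, "", "  ")
	if jsonErr != nil {
		panic(jsonErr)
	}
	fmt.Println(string(out))
}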